合并声网代码
This commit is contained in:
5
lib_faceunity/src/main/AndroidManifest.xml
Normal file
5
lib_faceunity/src/main/AndroidManifest.xml
Normal file
@@ -0,0 +1,5 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<manifest xmlns:tools="http://schemas.android.com/tools"
|
||||
>
|
||||
<uses-sdk tools:overrideLibrary="io.agora.beautyapi.faceunity" />
|
||||
</manifest>
|
||||
@@ -0,0 +1,179 @@
|
||||
/*
|
||||
* MIT License
|
||||
*
|
||||
* Copyright (c) 2023 Agora Community
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*/
|
||||
|
||||
package io.agora.beautyapi.faceunity
|
||||
|
||||
import android.content.Context
|
||||
import android.view.View
|
||||
import com.faceunity.core.faceunity.FURenderKit
|
||||
import io.agora.base.VideoFrame
|
||||
import io.agora.rtc2.Constants
|
||||
import io.agora.rtc2.RtcEngine
|
||||
|
||||
const val VERSION = "1.0.6"
|
||||
|
||||
enum class CaptureMode{
|
||||
Agora, // 使用声网内部的祼数据接口进行处理
|
||||
Custom // 自定义模式,需要自己调用onFrame接口将原始视频帧传给BeautyAPI做处理
|
||||
}
|
||||
|
||||
interface IEventCallback{
|
||||
|
||||
/**
|
||||
* 统计数据回调,每处理完一帧后会回调一次
|
||||
*
|
||||
* @param stats 美颜统计数据
|
||||
*/
|
||||
fun onBeautyStats(stats: BeautyStats)
|
||||
}
|
||||
|
||||
data class BeautyStats(
|
||||
val minCostMs:Long, // 统计区间内的最小值
|
||||
val maxCostMs: Long, // 统计区间内的最大值
|
||||
val averageCostMs: Long // 统计区间内的平均值
|
||||
)
|
||||
|
||||
enum class MirrorMode {
|
||||
|
||||
// 没有镜像正常画面的定义:前置拍到画面和手机看到画面是左右不一致的,后置拍到画面和手机看到画面是左右一致的
|
||||
|
||||
MIRROR_LOCAL_REMOTE, //本地远端都镜像,前置默认,本地和远端贴纸都正常
|
||||
MIRROR_LOCAL_ONLY, // 仅本地镜像,远端不镜像,,远端贴纸正常,本地贴纸镜像。用于打电话场景,电商直播场景(保证电商直播后面的告示牌文字是正的);这种模式因为本地远端是反的,所以肯定有一边的文字贴纸方向会是反的
|
||||
MIRROR_REMOTE_ONLY, // 仅远端镜像,本地不镜像,远端贴纸正常,本地贴纸镜像
|
||||
MIRROR_NONE // 本地远端都不镜像,后置默认,本地和远端贴纸都正常
|
||||
}
|
||||
|
||||
data class CameraConfig(
|
||||
val frontMirror: MirrorMode = MirrorMode.MIRROR_LOCAL_REMOTE, // 前置默认镜像:本地远端都镜像
|
||||
val backMirror: MirrorMode = MirrorMode.MIRROR_NONE // 后置默认镜像:本地远端都不镜像
|
||||
)
|
||||
|
||||
data class Config(
|
||||
val context: Context, // Android Context 上下文
|
||||
val rtcEngine: RtcEngine, // 声网Rtc引擎
|
||||
val fuRenderKit: FURenderKit, // 美颜SDK处理句柄
|
||||
val eventCallback: IEventCallback? = null, // 事件回调
|
||||
val captureMode: CaptureMode = CaptureMode.Agora, // 处理模式
|
||||
val statsDuration: Long = 1000, // 统计区间
|
||||
val statsEnable: Boolean = false, // 是否开启统计
|
||||
val cameraConfig: CameraConfig = CameraConfig() // 摄像头镜像配置
|
||||
)
|
||||
|
||||
enum class ErrorCode(val value: Int) {
|
||||
ERROR_OK(0), // 一切正常
|
||||
ERROR_HAS_NOT_INITIALIZED(101), // 没有调用Initialize或调用失败情况下调用了其他API
|
||||
ERROR_HAS_INITIALIZED(102), // 已经Initialize成功后再次调用报错
|
||||
ERROR_HAS_RELEASED(103), // 已经调用release销毁后还调用其他API
|
||||
ERROR_PROCESS_NOT_CUSTOM(104), // 非Custom处理模式下调用onFrame接口从外部传入视频帧
|
||||
ERROR_VIEW_TYPE_ERROR(105), // 当调用setupLocalVideo时view类型错误时返回
|
||||
ERROR_FRAME_SKIPPED(106), // 当处理帧忽略时在onFrame返回
|
||||
}
|
||||
|
||||
enum class BeautyPreset {
|
||||
CUSTOM, // 不使用推荐的美颜参数
|
||||
DEFAULT // 默认的
|
||||
}
|
||||
|
||||
fun createFaceUnityBeautyAPI(): FaceUnityBeautyAPI = FaceUnityBeautyAPIImpl()
|
||||
|
||||
interface FaceUnityBeautyAPI {
|
||||
|
||||
/**
|
||||
* 初始化API
|
||||
*
|
||||
* @param config 配置参数
|
||||
* @return 见ErrorCode
|
||||
*/
|
||||
fun initialize(config: Config): Int
|
||||
|
||||
/**
|
||||
* 开启/关闭美颜
|
||||
*
|
||||
* @param enable true:开启; false: 关闭
|
||||
* @return 见ErrorCode
|
||||
*/
|
||||
fun enable(enable: Boolean): Int
|
||||
|
||||
/**
|
||||
* 本地视图渲染,由内部来处理镜像问题
|
||||
*
|
||||
* @param view SurfaceView或TextureView
|
||||
* @param renderMode 渲染缩放模式
|
||||
* @return 见ErrorCode
|
||||
*/
|
||||
fun setupLocalVideo(view: View, renderMode: Int = Constants.RENDER_MODE_HIDDEN): Int
|
||||
|
||||
/**
|
||||
* 当ProcessMode==Custom时由外部传入原始视频帧
|
||||
*
|
||||
* @param videoFrame 原始视频帧
|
||||
* @return 见ErrorCode
|
||||
*/
|
||||
fun onFrame(videoFrame: VideoFrame): Int
|
||||
|
||||
/**
|
||||
* 声网提供的美颜最佳默认参数
|
||||
*
|
||||
* @return 见ErrorCode
|
||||
*/
|
||||
fun setBeautyPreset(preset: BeautyPreset = BeautyPreset.DEFAULT): Int
|
||||
|
||||
/**
|
||||
* 更新摄像头配置
|
||||
*/
|
||||
fun updateCameraConfig(config: CameraConfig): Int
|
||||
|
||||
/**
|
||||
* 是否是前置摄像头
|
||||
* PS:只在美颜处理中才能知道准确的值,否则会一直是true
|
||||
*/
|
||||
fun isFrontCamera(): Boolean
|
||||
|
||||
/**
|
||||
* 获取镜像状态
|
||||
*
|
||||
* @return 镜像状态,true: 镜像,false:非镜像
|
||||
*/
|
||||
fun getMirrorApplied(): Boolean
|
||||
|
||||
/**
|
||||
* 在处理线程里执行操作
|
||||
*
|
||||
* @param run 操作run
|
||||
*/
|
||||
fun runOnProcessThread(run: ()->Unit)
|
||||
|
||||
/**
|
||||
* 私参配置,用于不对外api的调用,多用于测试
|
||||
*/
|
||||
fun setParameters(key: String, value: String)
|
||||
|
||||
/**
|
||||
* 释放资源,一旦释放后这个实例将无法使用
|
||||
*
|
||||
* @return 见ErrorCode
|
||||
*/
|
||||
fun release(): Int
|
||||
|
||||
}
|
||||
@@ -0,0 +1,818 @@
|
||||
/*
|
||||
* MIT License
|
||||
*
|
||||
* Copyright (c) 2023 Agora Community
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*/
|
||||
|
||||
package io.agora.beautyapi.faceunity
|
||||
|
||||
import android.graphics.Matrix
|
||||
import android.opengl.GLES11Ext
|
||||
import android.opengl.GLES20
|
||||
import android.view.SurfaceView
|
||||
import android.view.TextureView
|
||||
import android.view.View
|
||||
import com.faceunity.core.entity.FUBundleData
|
||||
import com.faceunity.core.entity.FURenderInputData
|
||||
import com.faceunity.core.enumeration.CameraFacingEnum
|
||||
import com.faceunity.core.enumeration.FUInputBufferEnum
|
||||
import com.faceunity.core.enumeration.FUInputTextureEnum
|
||||
import com.faceunity.core.enumeration.FUTransformMatrixEnum
|
||||
import com.faceunity.core.faceunity.FUAIKit
|
||||
import com.faceunity.core.faceunity.FURenderKit
|
||||
import com.faceunity.core.model.facebeauty.FaceBeauty
|
||||
import com.faceunity.core.model.facebeauty.FaceBeautyFilterEnum
|
||||
import io.agora.base.TextureBufferHelper
|
||||
import io.agora.base.VideoFrame
|
||||
import io.agora.base.VideoFrame.I420Buffer
|
||||
import io.agora.base.VideoFrame.SourceType
|
||||
import io.agora.base.VideoFrame.TextureBuffer
|
||||
import io.agora.base.internal.video.EglBase
|
||||
import io.agora.base.internal.video.YuvHelper
|
||||
import io.agora.beautyapi.faceunity.utils.FuDeviceUtils
|
||||
import io.agora.beautyapi.faceunity.utils.LogUtils
|
||||
import io.agora.beautyapi.faceunity.utils.StatsHelper
|
||||
import io.agora.beautyapi.faceunity.utils.egl.GLFrameBuffer
|
||||
import io.agora.beautyapi.faceunity.utils.egl.TextureProcessHelper
|
||||
import io.agora.rtc2.Constants
|
||||
import io.agora.rtc2.gl.EglBaseProvider
|
||||
import io.agora.rtc2.video.IVideoFrameObserver
|
||||
import io.agora.rtc2.video.VideoCanvas
|
||||
import java.io.File
|
||||
import java.nio.ByteBuffer
|
||||
import java.util.Collections
|
||||
import java.util.concurrent.Callable
|
||||
|
||||
class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver {
|
||||
private val TAG = "FaceUnityBeautyAPIImpl"
|
||||
private val reportId = "scenarioAPI"
|
||||
private val reportCategory = "beauty_android_$VERSION"
|
||||
private var beautyMode = 0 // 0: 自动根据buffer类型切换,1:固定使用OES纹理,2:固定使用i420,3: 单纹理模式
|
||||
private var enableTextureAsync = true // 是否开启纹理+异步缓存处理,不支持在预览中实时切换。对于GPU性能好的手机可以减小美颜处理耗时,对于中端机开启后效果也不明显。
|
||||
|
||||
private var textureBufferHelper: TextureBufferHelper? = null
|
||||
private var wrapTextureBufferHelper: TextureBufferHelper? = null
|
||||
private var byteBuffer: ByteBuffer? = null
|
||||
private var byteArray: ByteArray? = null
|
||||
private var config: Config? = null
|
||||
private var enable: Boolean = false
|
||||
private var enableChange: Boolean = false
|
||||
private var isReleased: Boolean = false
|
||||
private var captureMirror = false
|
||||
private var renderMirror = false
|
||||
private val identityMatrix = Matrix()
|
||||
private var mTextureProcessHelper: TextureProcessHelper? = null
|
||||
private var statsHelper: StatsHelper? = null
|
||||
private var skipFrame = 0
|
||||
private enum class ProcessSourceType{
|
||||
UNKNOWN,
|
||||
TEXTURE_OES_ASYNC,
|
||||
TEXTURE_2D_ASYNC,
|
||||
TEXTURE_OES,
|
||||
TEXTURE_2D,
|
||||
I420
|
||||
}
|
||||
private var currProcessSourceType = ProcessSourceType.UNKNOWN
|
||||
private var deviceLevel = FuDeviceUtils.DEVICEINFO_UNKNOWN
|
||||
private var isFrontCamera = true
|
||||
private var cameraConfig = CameraConfig()
|
||||
private var localVideoRenderMode = Constants.RENDER_MODE_HIDDEN
|
||||
private val pendingProcessRunList = Collections.synchronizedList(mutableListOf<()->Unit>())
|
||||
private val transformGLFrameBuffer = GLFrameBuffer()
|
||||
|
||||
override fun initialize(config: Config): Int {
|
||||
if (this.config != null) {
|
||||
LogUtils.e(TAG, "initialize >> The beauty api has been initialized!")
|
||||
return ErrorCode.ERROR_HAS_INITIALIZED.value
|
||||
}
|
||||
this.config = config
|
||||
this.cameraConfig = config.cameraConfig
|
||||
if (config.captureMode == CaptureMode.Agora) {
|
||||
config.rtcEngine.registerVideoFrameObserver(this)
|
||||
}
|
||||
statsHelper = StatsHelper(config.statsDuration){
|
||||
this.config?.eventCallback?.onBeautyStats(it)
|
||||
}
|
||||
LogUtils.i(TAG, "initialize >> config = $config")
|
||||
LogUtils.i(TAG, "initialize >> beauty api version=$VERSION, beauty sdk version=${FURenderKit.getInstance().getVersion()}")
|
||||
|
||||
// config face beauty
|
||||
if (deviceLevel == FuDeviceUtils.DEVICEINFO_UNKNOWN) {
|
||||
deviceLevel = FuDeviceUtils.judgeDeviceLevel(config.context)
|
||||
FUAIKit.getInstance().faceProcessorSetFaceLandmarkQuality(deviceLevel)
|
||||
if (deviceLevel > FuDeviceUtils.DEVICE_LEVEL_MID) {
|
||||
FUAIKit.getInstance().fuFaceProcessorSetDetectSmallFace(true)
|
||||
}
|
||||
}
|
||||
LogUtils.i(TAG, "initialize >> FuDeviceUtils deviceLevel=$deviceLevel")
|
||||
config.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "initialize", "config=$config, deviceLevel=$deviceLevel", 0)
|
||||
return ErrorCode.ERROR_OK.value
|
||||
}
|
||||
|
||||
override fun enable(enable: Boolean): Int {
|
||||
LogUtils.i(TAG, "enable >> enable = $enable")
|
||||
if (config == null) {
|
||||
LogUtils.e(TAG, "enable >> The beauty api has not been initialized!")
|
||||
return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
|
||||
}
|
||||
if (isReleased) {
|
||||
LogUtils.e(TAG, "enable >> The beauty api has been released!")
|
||||
return ErrorCode.ERROR_HAS_RELEASED.value
|
||||
}
|
||||
if(config?.captureMode == CaptureMode.Custom){
|
||||
skipFrame = 2
|
||||
LogUtils.i(TAG, "enable >> skipFrame = $skipFrame")
|
||||
}
|
||||
config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "enable", "enable=$enable", 0)
|
||||
|
||||
if(this.enable != enable){
|
||||
this.enable = enable
|
||||
enableChange = true
|
||||
LogUtils.i(TAG, "enable >> enableChange")
|
||||
}
|
||||
return ErrorCode.ERROR_OK.value
|
||||
}
|
||||
|
||||
override fun setupLocalVideo(view: View, renderMode: Int): Int {
|
||||
val rtcEngine = config?.rtcEngine
|
||||
if(rtcEngine == null){
|
||||
LogUtils.e(TAG, "setupLocalVideo >> The beauty api has not been initialized!")
|
||||
return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
|
||||
}
|
||||
LogUtils.i(TAG, "setupLocalVideo >> view=$view, renderMode=$renderMode")
|
||||
localVideoRenderMode = renderMode
|
||||
rtcEngine.sendCustomReportMessage(reportId, reportCategory, "enable", "view=$view, renderMode=$renderMode", 0)
|
||||
if (view is TextureView || view is SurfaceView) {
|
||||
val canvas = VideoCanvas(view, renderMode, 0)
|
||||
canvas.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED
|
||||
rtcEngine.setupLocalVideo(canvas)
|
||||
return ErrorCode.ERROR_OK.value
|
||||
}
|
||||
return ErrorCode.ERROR_VIEW_TYPE_ERROR.value
|
||||
}
|
||||
|
||||
override fun onFrame(videoFrame: VideoFrame): Int {
|
||||
val conf = config
|
||||
if(conf == null){
|
||||
LogUtils.e(TAG, "onFrame >> The beauty api has not been initialized!")
|
||||
return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
|
||||
}
|
||||
if (isReleased) {
|
||||
LogUtils.e(TAG, "onFrame >> The beauty api has been released!")
|
||||
return ErrorCode.ERROR_HAS_RELEASED.value
|
||||
}
|
||||
if (conf.captureMode != CaptureMode.Custom) {
|
||||
LogUtils.e(TAG, "onFrame >> The capture mode is not Custom!")
|
||||
return ErrorCode.ERROR_PROCESS_NOT_CUSTOM.value
|
||||
}
|
||||
if (processBeauty(videoFrame)) {
|
||||
return ErrorCode.ERROR_OK.value
|
||||
}
|
||||
LogUtils.i(TAG, "onFrame >> Skip Frame.")
|
||||
return ErrorCode.ERROR_FRAME_SKIPPED.value
|
||||
}
|
||||
|
||||
override fun updateCameraConfig(config: CameraConfig): Int {
|
||||
LogUtils.i(TAG, "updateCameraConfig >> oldCameraConfig=$cameraConfig, newCameraConfig=$config")
|
||||
cameraConfig = CameraConfig(config.frontMirror, config.backMirror)
|
||||
this.config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "updateCameraConfig", "config=$config", 0)
|
||||
|
||||
return ErrorCode.ERROR_OK.value
|
||||
}
|
||||
|
||||
override fun runOnProcessThread(run: () -> Unit) {
|
||||
if (config == null) {
|
||||
LogUtils.e(TAG, "runOnProcessThread >> The beauty api has not been initialized!")
|
||||
return
|
||||
}
|
||||
if (isReleased) {
|
||||
LogUtils.e(TAG, "runOnProcessThread >> The beauty api has been released!")
|
||||
return
|
||||
}
|
||||
if (textureBufferHelper?.handler?.looper?.thread == Thread.currentThread()) {
|
||||
run.invoke()
|
||||
} else if (textureBufferHelper != null) {
|
||||
textureBufferHelper?.handler?.post(run)
|
||||
} else {
|
||||
pendingProcessRunList.add(run)
|
||||
}
|
||||
}
|
||||
|
||||
override fun isFrontCamera() = isFrontCamera
|
||||
|
||||
override fun setParameters(key: String, value: String) {
|
||||
when(key){
|
||||
"beauty_mode" -> beautyMode = value.toInt()
|
||||
"enableTextureAsync" -> enableTextureAsync = value.toBoolean()
|
||||
}
|
||||
}
|
||||
|
||||
override fun setBeautyPreset(preset: BeautyPreset): Int {
|
||||
val conf = config
|
||||
if(conf == null){
|
||||
LogUtils.e(TAG, "setBeautyPreset >> The beauty api has not been initialized!")
|
||||
return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
|
||||
}
|
||||
if (isReleased) {
|
||||
LogUtils.e(TAG, "setBeautyPreset >> The beauty api has been released!")
|
||||
return ErrorCode.ERROR_HAS_RELEASED.value
|
||||
}
|
||||
|
||||
LogUtils.i(TAG, "setBeautyPreset >> preset = $preset")
|
||||
config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "enable", "preset=$preset", 0)
|
||||
|
||||
val recommendFaceBeauty = FaceBeauty(FUBundleData("graphics" + File.separator + "face_beautification.bundle"))
|
||||
if (preset == BeautyPreset.DEFAULT) {
|
||||
recommendFaceBeauty.filterName = FaceBeautyFilterEnum.FENNEN_1
|
||||
recommendFaceBeauty.filterIntensity = 0.7
|
||||
// 美牙
|
||||
recommendFaceBeauty.toothIntensity = 0.3
|
||||
// 亮眼
|
||||
recommendFaceBeauty.eyeBrightIntensity = 0.3
|
||||
// 大眼
|
||||
recommendFaceBeauty.eyeEnlargingIntensity = 0.5
|
||||
// 红润
|
||||
recommendFaceBeauty.redIntensity = 0.5 * 2
|
||||
// 美白
|
||||
recommendFaceBeauty.colorIntensity = 0.75 * 2
|
||||
// 磨皮
|
||||
recommendFaceBeauty.blurIntensity = 0.75 * 6
|
||||
if (deviceLevel > FuDeviceUtils.DEVICE_LEVEL_MID) {
|
||||
val score = FUAIKit.getInstance().getFaceProcessorGetConfidenceScore(0)
|
||||
if (score > 0.95) {
|
||||
recommendFaceBeauty.blurType = 3
|
||||
recommendFaceBeauty.enableBlurUseMask = true
|
||||
} else {
|
||||
recommendFaceBeauty.blurType = 2
|
||||
recommendFaceBeauty.enableBlurUseMask = false
|
||||
}
|
||||
} else {
|
||||
recommendFaceBeauty.blurType = 2
|
||||
recommendFaceBeauty.enableBlurUseMask = false
|
||||
}
|
||||
// 嘴型
|
||||
recommendFaceBeauty.mouthIntensity = 0.3
|
||||
// 瘦鼻
|
||||
recommendFaceBeauty.noseIntensity = 0.1
|
||||
// 额头
|
||||
recommendFaceBeauty.forHeadIntensity = 0.3
|
||||
// 下巴
|
||||
recommendFaceBeauty.chinIntensity = 0.0
|
||||
// 瘦脸
|
||||
recommendFaceBeauty.cheekThinningIntensity = 0.3
|
||||
// 窄脸
|
||||
recommendFaceBeauty.cheekNarrowIntensity = 0.0
|
||||
// 小脸
|
||||
recommendFaceBeauty.cheekSmallIntensity = 0.0
|
||||
// v脸
|
||||
recommendFaceBeauty.cheekVIntensity = 0.0
|
||||
}
|
||||
conf.fuRenderKit.faceBeauty = recommendFaceBeauty
|
||||
return ErrorCode.ERROR_OK.value
|
||||
}
|
||||
|
||||
override fun release(): Int {
|
||||
val conf = config
|
||||
val fuRenderer = conf?.fuRenderKit
|
||||
if(fuRenderer == null){
|
||||
LogUtils.e(TAG, "release >> The beauty api has not been initialized!")
|
||||
return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value
|
||||
}
|
||||
if (isReleased) {
|
||||
LogUtils.e(TAG, "setBeautyPreset >> The beauty api has been released!")
|
||||
return ErrorCode.ERROR_HAS_RELEASED.value
|
||||
}
|
||||
LogUtils.i(TAG, "release")
|
||||
if (conf.captureMode == CaptureMode.Agora) {
|
||||
conf.rtcEngine.registerVideoFrameObserver(null)
|
||||
}
|
||||
conf.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "release", "", 0)
|
||||
|
||||
isReleased = true
|
||||
textureBufferHelper?.let {
|
||||
textureBufferHelper = null
|
||||
it.handler.removeCallbacksAndMessages(null)
|
||||
it.invoke {
|
||||
fuRenderer.release()
|
||||
mTextureProcessHelper?.release()
|
||||
mTextureProcessHelper = null
|
||||
transformGLFrameBuffer.release()
|
||||
null
|
||||
}
|
||||
// it.handler.looper.quit()
|
||||
it.dispose()
|
||||
}
|
||||
wrapTextureBufferHelper?.let {
|
||||
wrapTextureBufferHelper = null
|
||||
it.dispose()
|
||||
}
|
||||
statsHelper?.reset()
|
||||
statsHelper = null
|
||||
pendingProcessRunList.clear()
|
||||
return ErrorCode.ERROR_OK.value
|
||||
}
|
||||
|
||||
private fun processBeauty(videoFrame: VideoFrame): Boolean {
|
||||
if (isReleased) {
|
||||
LogUtils.e(TAG, "processBeauty >> The beauty api has been released!")
|
||||
return false
|
||||
}
|
||||
|
||||
val cMirror =
|
||||
if (isFrontCamera) {
|
||||
when (cameraConfig.frontMirror) {
|
||||
MirrorMode.MIRROR_LOCAL_REMOTE -> true
|
||||
MirrorMode.MIRROR_LOCAL_ONLY -> false
|
||||
MirrorMode.MIRROR_REMOTE_ONLY -> true
|
||||
MirrorMode.MIRROR_NONE -> false
|
||||
}
|
||||
} else {
|
||||
when (cameraConfig.backMirror) {
|
||||
MirrorMode.MIRROR_LOCAL_REMOTE -> true
|
||||
MirrorMode.MIRROR_LOCAL_ONLY -> false
|
||||
MirrorMode.MIRROR_REMOTE_ONLY -> true
|
||||
MirrorMode.MIRROR_NONE -> false
|
||||
}
|
||||
}
|
||||
val rMirror =
|
||||
if (isFrontCamera) {
|
||||
when (cameraConfig.frontMirror) {
|
||||
MirrorMode.MIRROR_LOCAL_REMOTE -> false
|
||||
MirrorMode.MIRROR_LOCAL_ONLY -> true
|
||||
MirrorMode.MIRROR_REMOTE_ONLY -> true
|
||||
MirrorMode.MIRROR_NONE -> false
|
||||
}
|
||||
} else {
|
||||
when (cameraConfig.backMirror) {
|
||||
MirrorMode.MIRROR_LOCAL_REMOTE -> false
|
||||
MirrorMode.MIRROR_LOCAL_ONLY -> true
|
||||
MirrorMode.MIRROR_REMOTE_ONLY -> true
|
||||
MirrorMode.MIRROR_NONE -> false
|
||||
}
|
||||
}
|
||||
if (captureMirror != cMirror || renderMirror != rMirror) {
|
||||
LogUtils.w(TAG, "processBeauty >> enable=$enable, captureMirror=$captureMirror->$cMirror, renderMirror=$renderMirror->$rMirror")
|
||||
captureMirror = cMirror
|
||||
if(renderMirror != rMirror){
|
||||
renderMirror = rMirror
|
||||
config?.rtcEngine?.setLocalRenderMode(
|
||||
localVideoRenderMode,
|
||||
if(renderMirror) Constants.VIDEO_MIRROR_MODE_ENABLED else Constants.VIDEO_MIRROR_MODE_DISABLED
|
||||
)
|
||||
}
|
||||
textureBufferHelper?.invoke {
|
||||
mTextureProcessHelper?.reset()
|
||||
}
|
||||
skipFrame = 2
|
||||
return false
|
||||
}
|
||||
|
||||
val oldIsFrontCamera = isFrontCamera
|
||||
isFrontCamera = videoFrame.sourceType == SourceType.kFrontCamera
|
||||
if(oldIsFrontCamera != isFrontCamera){
|
||||
LogUtils.w(TAG, "processBeauty >> oldIsFrontCamera=$oldIsFrontCamera, isFrontCamera=$isFrontCamera")
|
||||
return false
|
||||
}
|
||||
|
||||
if(enableChange){
|
||||
enableChange = false
|
||||
textureBufferHelper?.invoke {
|
||||
mTextureProcessHelper?.reset()
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
if(!enable){
|
||||
return true
|
||||
}
|
||||
|
||||
if (textureBufferHelper == null) {
|
||||
textureBufferHelper = TextureBufferHelper.create(
|
||||
"FURender",
|
||||
EglBaseProvider.instance().rootEglBase.eglBaseContext
|
||||
)
|
||||
textureBufferHelper?.invoke {
|
||||
synchronized(pendingProcessRunList){
|
||||
val iterator = pendingProcessRunList.iterator()
|
||||
while (iterator.hasNext()){
|
||||
iterator.next().invoke()
|
||||
iterator.remove()
|
||||
}
|
||||
}
|
||||
}
|
||||
LogUtils.i(TAG, "processBeauty >> create texture buffer, beautyMode=$beautyMode")
|
||||
}
|
||||
if (wrapTextureBufferHelper == null) {
|
||||
wrapTextureBufferHelper = TextureBufferHelper.create(
|
||||
"FURenderWrap",
|
||||
EglBaseProvider.instance().rootEglBase.eglBaseContext
|
||||
)
|
||||
LogUtils.i(TAG, "processBeauty >> create texture buffer wrap, beautyMode=$beautyMode")
|
||||
}
|
||||
val startTime = System.currentTimeMillis()
|
||||
val processTexId = when (beautyMode) {
|
||||
2 -> processBeautySingleBuffer(videoFrame)
|
||||
3 -> {
|
||||
if (enableTextureAsync) {
|
||||
processBeautySingleTextureAsync(videoFrame)
|
||||
} else {
|
||||
processBeautySingleTexture(videoFrame)
|
||||
}
|
||||
}
|
||||
else -> processBeautyAuto(videoFrame)
|
||||
}
|
||||
|
||||
if(config?.statsEnable == true){
|
||||
val costTime = System.currentTimeMillis() - startTime
|
||||
statsHelper?.once(costTime)
|
||||
}
|
||||
|
||||
if (processTexId <= 0) {
|
||||
LogUtils.w(TAG, "processBeauty >> processTexId <= 0")
|
||||
return false
|
||||
}
|
||||
|
||||
if(skipFrame > 0){
|
||||
skipFrame --
|
||||
LogUtils.w(TAG, "processBeauty >> skipFrame=$skipFrame")
|
||||
return false
|
||||
}
|
||||
|
||||
val processBuffer: TextureBuffer = wrapTextureBufferHelper?.wrapTextureBuffer(
|
||||
videoFrame.rotatedWidth,
|
||||
videoFrame.rotatedHeight,
|
||||
TextureBuffer.Type.RGB,
|
||||
processTexId,
|
||||
identityMatrix
|
||||
) ?: return false
|
||||
videoFrame.replaceBuffer(processBuffer, 0, videoFrame.timestampNs)
|
||||
return true
|
||||
}
|
||||
|
||||
private fun processBeautyAuto(videoFrame: VideoFrame): Int {
|
||||
val buffer = videoFrame.buffer
|
||||
return if (buffer is TextureBuffer) {
|
||||
if (enableTextureAsync) {
|
||||
processBeautySingleTextureAsync(videoFrame)
|
||||
} else {
|
||||
processBeautySingleTexture(videoFrame)
|
||||
}
|
||||
} else {
|
||||
processBeautySingleBuffer(videoFrame)
|
||||
}
|
||||
}
|
||||
|
||||
private fun processBeautySingleTextureAsync(videoFrame: VideoFrame): Int {
|
||||
val texBufferHelper = wrapTextureBufferHelper ?: return -1
|
||||
val textureBuffer = videoFrame.buffer as? TextureBuffer ?: return -1
|
||||
|
||||
when(textureBuffer.type){
|
||||
TextureBuffer.Type.OES -> {
|
||||
if(currProcessSourceType != ProcessSourceType.TEXTURE_OES_ASYNC){
|
||||
LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_OES_ASYNC}")
|
||||
if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
|
||||
skipFrame = 3
|
||||
}
|
||||
currProcessSourceType = ProcessSourceType.TEXTURE_OES_ASYNC
|
||||
return -1
|
||||
}
|
||||
}
|
||||
else -> {
|
||||
if(currProcessSourceType != ProcessSourceType.TEXTURE_2D_ASYNC){
|
||||
LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_2D_ASYNC}")
|
||||
if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
|
||||
skipFrame = 3
|
||||
}
|
||||
currProcessSourceType = ProcessSourceType.TEXTURE_2D_ASYNC
|
||||
skipFrame = 6
|
||||
return -1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if(mTextureProcessHelper == null) {
|
||||
mTextureProcessHelper = TextureProcessHelper()
|
||||
mTextureProcessHelper?.setFilter { frame ->
|
||||
val fuRenderKit = config?.fuRenderKit ?: return@setFilter -1
|
||||
|
||||
val input = FURenderInputData(frame.width, frame.height)
|
||||
input.texture = FURenderInputData.FUTexture(
|
||||
FUInputTextureEnum.FU_ADM_FLAG_COMMON_TEXTURE,
|
||||
frame.textureId
|
||||
)
|
||||
val isFront = frame.isFrontCamera
|
||||
input.renderConfig.let {
|
||||
if (isFront) {
|
||||
it.cameraFacing = CameraFacingEnum.CAMERA_FRONT
|
||||
it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0
|
||||
it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0
|
||||
it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
|
||||
it.deviceOrientation = 270
|
||||
} else {
|
||||
it.cameraFacing = CameraFacingEnum.CAMERA_BACK
|
||||
it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0
|
||||
it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0
|
||||
it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
|
||||
it.deviceOrientation = 270
|
||||
}
|
||||
}
|
||||
if (isReleased) {
|
||||
return@setFilter -1
|
||||
}
|
||||
val ret = textureBufferHelper?.invoke {
|
||||
synchronized(EglBase.lock){
|
||||
return@invoke fuRenderKit.renderWithInput(input).texture?.texId ?: -1
|
||||
}
|
||||
}
|
||||
return@setFilter ret ?: -1
|
||||
}
|
||||
}
|
||||
|
||||
return texBufferHelper.invoke {
|
||||
if(isReleased){
|
||||
return@invoke -1
|
||||
}
|
||||
|
||||
return@invoke mTextureProcessHelper?.process(
|
||||
textureBuffer.textureId,
|
||||
when (textureBuffer.type) {
|
||||
TextureBuffer.Type.OES -> GLES11Ext.GL_TEXTURE_EXTERNAL_OES
|
||||
else -> GLES20.GL_TEXTURE_2D
|
||||
},
|
||||
textureBuffer.width,
|
||||
textureBuffer.height,
|
||||
videoFrame.rotation,
|
||||
textureBuffer.transformMatrixArray,
|
||||
isFrontCamera,
|
||||
(isFrontCamera && !captureMirror) || (!isFrontCamera && captureMirror)
|
||||
)?: -1
|
||||
}
|
||||
}
|
||||
|
||||
private fun processBeautySingleTexture(videoFrame: VideoFrame): Int {
|
||||
val texBufferHelper = wrapTextureBufferHelper ?: return -1
|
||||
val textureBuffer = videoFrame.buffer as? TextureBuffer ?: return -1
|
||||
|
||||
when(textureBuffer.type){
|
||||
TextureBuffer.Type.OES -> {
|
||||
if(currProcessSourceType != ProcessSourceType.TEXTURE_OES){
|
||||
LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_OES}")
|
||||
if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
|
||||
skipFrame = 3
|
||||
}
|
||||
currProcessSourceType = ProcessSourceType.TEXTURE_OES
|
||||
return -1
|
||||
}
|
||||
}
|
||||
else -> {
|
||||
if(currProcessSourceType != ProcessSourceType.TEXTURE_2D){
|
||||
LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_2D}")
|
||||
if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
|
||||
skipFrame = 3
|
||||
}
|
||||
currProcessSourceType = ProcessSourceType.TEXTURE_2D
|
||||
skipFrame = 6
|
||||
return -1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
val width = videoFrame.rotatedWidth
|
||||
val height = videoFrame.rotatedHeight
|
||||
val isFront = videoFrame.sourceType == SourceType.kFrontCamera
|
||||
val rotation = videoFrame.rotation
|
||||
|
||||
return texBufferHelper.invoke {
|
||||
val fuRenderKit = config?.fuRenderKit ?: return@invoke -1
|
||||
|
||||
transformGLFrameBuffer.setSize(width, height)
|
||||
transformGLFrameBuffer.resetTransform()
|
||||
transformGLFrameBuffer.setTexMatrix(textureBuffer.transformMatrixArray)
|
||||
transformGLFrameBuffer.setRotation(rotation)
|
||||
var flipH = isFront
|
||||
if((isFrontCamera && !captureMirror) || (!isFrontCamera && captureMirror)){
|
||||
flipH = !flipH
|
||||
}
|
||||
transformGLFrameBuffer.setFlipH(flipH)
|
||||
val transformTexId = transformGLFrameBuffer.process(
|
||||
textureBuffer.textureId, when (textureBuffer.type) {
|
||||
TextureBuffer.Type.OES -> GLES11Ext.GL_TEXTURE_EXTERNAL_OES
|
||||
else -> GLES20.GL_TEXTURE_2D
|
||||
}
|
||||
)
|
||||
|
||||
val input = FURenderInputData(width, height)
|
||||
input.texture = FURenderInputData.FUTexture(
|
||||
FUInputTextureEnum.FU_ADM_FLAG_COMMON_TEXTURE,
|
||||
transformTexId
|
||||
)
|
||||
input.renderConfig.let {
|
||||
if (isFront) {
|
||||
it.cameraFacing = CameraFacingEnum.CAMERA_FRONT
|
||||
it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0
|
||||
it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0
|
||||
it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
|
||||
it.deviceOrientation = 270
|
||||
} else {
|
||||
it.cameraFacing = CameraFacingEnum.CAMERA_BACK
|
||||
it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0
|
||||
it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0
|
||||
it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
|
||||
it.deviceOrientation = 270
|
||||
}
|
||||
}
|
||||
if (isReleased) {
|
||||
return@invoke -1
|
||||
}
|
||||
synchronized(EglBase.lock){
|
||||
return@invoke fuRenderKit.renderWithInput(input).texture?.texId ?: -1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Runs the FaceUnity beauty pipeline on a CPU (I420) frame.
 *
 * The frame is converted to NV21, handed to [FURenderKit] as an image buffer,
 * and the resulting texture id is returned. Returns -1 when the frame must be
 * skipped (helper not ready, source-type switch in progress, buffer not yet
 * allocated, API released, or a pending texture queue forces a reset).
 *
 * @param videoFrame the captured frame; its buffer is read but not modified here.
 * @return the beautified texture id, or -1 if this frame should pass through untouched.
 */
private fun processBeautySingleBuffer(videoFrame: VideoFrame): Int {
    val texBufferHelper = textureBufferHelper ?: return -1
    // When the source type just changed, drop a few frames so stale GL state
    // from the previous path (texture/OES) is not mixed with the buffer path.
    if(currProcessSourceType != ProcessSourceType.I420){
        LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.I420}")
        if (currProcessSourceType != ProcessSourceType.UNKNOWN) {
            skipFrame = 3
        }
        currProcessSourceType = ProcessSourceType.I420
        return -1
    }
    // First call after a size change returns null (buffers are being reallocated).
    val bufferArray = getNV21Buffer(videoFrame) ?: return -1
    val buffer = videoFrame.buffer
    val width = buffer.width
    val height = buffer.height
    val isFront = videoFrame.sourceType == SourceType.kFrontCamera
    // Mirror compensation: true when the SDK-side mirror flag disagrees with the
    // camera facing, so the transform matrices below must add/remove a flip.
    val mirror = (isFrontCamera && !captureMirror) || (!isFrontCamera && captureMirror)
    val rotation = videoFrame.rotation

    // All FU rendering must happen on the texture helper's GL thread.
    return texBufferHelper.invoke(Callable {
        if(isReleased){
            return@Callable -1
        }
        val fuRenderKit = config?.fuRenderKit ?: return@Callable -1
        val input = FURenderInputData(width, height)
        input.imageBuffer = FURenderInputData.FUImageBuffer(
            FUInputBufferEnum.FU_FORMAT_NV21_BUFFER,
            bufferArray
        )
        // Per-frame FaceUnity transform configuration. The matrix choices are
        // camera-facing, rotation and mirror dependent; the exact enum per case
        // was tuned for this pipeline — do not reorder or "simplify" the branches.
        input.renderConfig.let {
            if (isFront) {
                it.cameraFacing = CameraFacingEnum.CAMERA_FRONT
                it.inputBufferMatrix = if(mirror) {
                    when (rotation) {
                        0 -> FUTransformMatrixEnum.CCROT0
                        180 -> FUTransformMatrixEnum.CCROT180
                        else -> FUTransformMatrixEnum.CCROT90
                    }
                } else {
                    when (rotation) {
                        0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL
                        180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
                        else -> FUTransformMatrixEnum.CCROT90_FLIPHORIZONTAL
                    }
                }
                // Texture matrix mirrors the buffer matrix for the CPU path.
                it.inputTextureMatrix = if(mirror) {
                    when (rotation) {
                        0 -> FUTransformMatrixEnum.CCROT0
                        180 -> FUTransformMatrixEnum.CCROT180
                        else -> FUTransformMatrixEnum.CCROT90
                    }
                } else {
                    when (rotation) {
                        0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL
                        180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
                        else -> FUTransformMatrixEnum.CCROT90_FLIPHORIZONTAL
                    }
                }
                // Device orientation hint for face tracking (degrees).
                it.deviceOrientation = when(rotation){
                    0 -> 270
                    180 -> 90
                    else -> 0
                }
                it.outputMatrix = FUTransformMatrixEnum.CCROT0
            } else {
                it.cameraFacing = CameraFacingEnum.CAMERA_BACK
                it.inputBufferMatrix = if(mirror) {
                    when (rotation) {
                        0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL
                        180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
                        else -> FUTransformMatrixEnum.CCROT90_FLIPVERTICAL
                    }
                } else {
                    when (rotation) {
                        0 -> FUTransformMatrixEnum.CCROT0
                        180 -> FUTransformMatrixEnum.CCROT180
                        else -> FUTransformMatrixEnum.CCROT270
                    }
                }
                it.inputTextureMatrix = if(mirror) {
                    when (rotation) {
                        0 -> FUTransformMatrixEnum.CCROT0_FLIPHORIZONTAL
                        180 -> FUTransformMatrixEnum.CCROT0_FLIPVERTICAL
                        else -> FUTransformMatrixEnum.CCROT90_FLIPVERTICAL
                    }
                } else {
                    when (rotation) {
                        0 -> FUTransformMatrixEnum.CCROT0
                        180 -> FUTransformMatrixEnum.CCROT180
                        else -> FUTransformMatrixEnum.CCROT270
                    }
                }
                it.deviceOrientation = when(rotation){
                    0 -> 270
                    180 -> 90
                    else -> 0
                }
                it.outputMatrix = FUTransformMatrixEnum.CCROT0
            }
        }

        // If the texture path still has frames queued, flush it and drop this
        // frame rather than interleaving texture and buffer renders.
        mTextureProcessHelper?.let {
            if(it.size() > 0){
                it.reset()
                return@Callable -1
            }
        }
        // EglBase.lock serializes access to the shared EGL context.
        synchronized(EglBase.lock){
            return@Callable fuRenderKit.renderWithInput(input).texture?.texId ?: -1
        }
    })
}
|
||||
|
||||
/**
 * Converts a frame's buffer to NV21 bytes, reusing a cached direct buffer and array.
 *
 * On the first call (or whenever the frame size changes) the caches are
 * (re)allocated and null is returned, so that one frame is dropped instead of
 * converting into a wrongly sized buffer.
 *
 * @return the NV21 bytes for this frame, or null if the frame must be skipped.
 */
private fun getNV21Buffer(videoFrame: VideoFrame): ByteArray? {
    val frameBuffer = videoFrame.buffer
    val w = frameBuffer.width
    val h = frameBuffer.height
    // NV21 uses 12 bits per pixel; the +0.5f guards the float rounding.
    val requiredSize = (w * h * 3.0f / 2.0f + 0.5f).toInt()
    if (byteBuffer == null || byteBuffer?.capacity() != requiredSize || byteArray == null || byteArray?.size != requiredSize) {
        byteBuffer?.clear()
        byteBuffer = ByteBuffer.allocateDirect(requiredSize)
        byteArray = ByteArray(requiredSize)
        // Drop this frame; the freshly sized caches serve the next one.
        return null
    }
    val nv21Array = byteArray ?: return null
    val nv21Buffer = byteBuffer ?: return null
    val i420 = frameBuffer as? I420Buffer ?: frameBuffer.toI420()
    // Feeding the V plane where I420ToNV12 expects U (and vice versa) produces
    // VU-interleaved chroma, i.e. NV21 — which is what FaceUnity consumes.
    YuvHelper.I420ToNV12(
        i420.dataY, i420.strideY,
        i420.dataV, i420.strideV,
        i420.dataU, i420.strideU,
        nv21Buffer, w, h
    )
    nv21Buffer.position(0)
    nv21Buffer.get(nv21Array)
    // Release only if toI420() created a temporary copy above.
    if (frameBuffer !is I420Buffer) {
        i420.release()
    }
    return nv21Array
}
|
||||
|
||||
// IVideoFrameObserver implementation
|
||||
|
||||
// Entry point for locally captured frames: run the beauty pipeline on every
// non-null frame; null frames are passed through unprocessed.
override fun onCaptureVideoFrame(sourceType: Int, videoFrame: VideoFrame?): Boolean {
    val frame = videoFrame ?: return false
    return processBeauty(frame)
}
|
||||
|
||||
// Pre-encode frames are never modified by this observer.
override fun onPreEncodeVideoFrame(sourceType: Int, videoFrame: VideoFrame?): Boolean {
    return false
}
|
||||
|
||||
// Media-player frames are never modified by this observer.
override fun onMediaPlayerVideoFrame(videoFrame: VideoFrame?, mediaPlayerId: Int): Boolean {
    return false
}
|
||||
|
||||
// Remote render frames are never modified by this observer.
override fun onRenderVideoFrame(channelId: String?, uid: Int, videoFrame: VideoFrame?): Boolean {
    return false
}
|
||||
|
||||
// Frames are read and overwritten in place with the beautified result.
override fun getVideoFrameProcessMode(): Int {
    return IVideoFrameObserver.PROCESS_MODE_READ_WRITE
}
|
||||
|
||||
// Accept whatever pixel format the SDK delivers by default.
override fun getVideoFormatPreference(): Int {
    return IVideoFrameObserver.VIDEO_PIXEL_DEFAULT
}
|
||||
|
||||
// Rotation is handled inside the beauty pipeline, not by the SDK.
override fun getRotationApplied(): Boolean {
    return false
}
|
||||
|
||||
// Ask the SDK to mirror only while capture mirroring is requested and the
// beauty pipeline is disabled (when enabled, mirroring happens in-pipeline).
override fun getMirrorApplied(): Boolean {
    return !enable && captureMirror
}
|
||||
|
||||
// Observe frames right after capture, before encoding or rendering.
override fun getObservedFramePosition(): Int {
    return IVideoFrameObserver.POSITION_POST_CAPTURER
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,359 @@
|
||||
package io.agora.beautyapi.faceunity.agora;
|
||||
|
||||
import android.app.Activity;
|
||||
import android.os.Bundle;
|
||||
import android.os.Handler;
|
||||
import android.text.TextUtils;
|
||||
import android.util.Log;
|
||||
import android.view.Gravity;
|
||||
import android.view.View;
|
||||
import android.widget.FrameLayout;
|
||||
import android.widget.RelativeLayout;
|
||||
|
||||
import androidx.cardview.widget.CardView;
|
||||
import androidx.fragment.app.FragmentActivity;
|
||||
|
||||
import com.lzf.easyfloat.EasyFloat;
|
||||
import com.lzf.easyfloat.enums.ShowPattern;
|
||||
import com.lzf.easyfloat.interfaces.FloatCallbacks;
|
||||
import com.lzf.easyfloat.interfaces.OnInvokeView;
|
||||
import com.tencent.rtmp.ITXLivePlayListener;
|
||||
import com.tencent.rtmp.TXLivePlayer;
|
||||
import com.tencent.rtmp.ui.TXCloudVideoView;
|
||||
import com.yunbao.common.CommonAppConfig;
|
||||
import com.yunbao.common.Constants;
|
||||
import com.yunbao.common.R;
|
||||
import com.yunbao.common.bean.LiveBean;
|
||||
import com.yunbao.common.dialog.HintDialog;
|
||||
import com.yunbao.common.http.HttpCallback;
|
||||
import com.yunbao.common.http.HttpClient;
|
||||
import com.yunbao.common.manager.IMLoginManager;
|
||||
import com.yunbao.common.utils.DpUtil;
|
||||
import com.yunbao.common.utils.L;
|
||||
import com.yunbao.common.utils.RouteUtil;
|
||||
import com.yunbao.common.views.floatingview.APPEasyFloat;
|
||||
import com.yunbao.common.views.floatingview.FloatingMagnetView;
|
||||
import com.yunbao.common.views.floatingview.MagnetViewListener;
|
||||
import com.yunbao.common.views.weight.ViewClicksAntiShake;
|
||||
|
||||
import kotlin.Unit;
|
||||
import kotlin.jvm.functions.Function1;
|
||||
|
||||
/**
 * Manages the in-app and system-level floating windows for a live room.
 *
 * Two display modes are supported:
 * <ul>
 *   <li>{@link #builderFloat} — an in-app floating view via {@code APPEasyFloat};</li>
 *   <li>{@link #builderSystemFloat} — a system overlay via {@code EasyFloat}.</li>
 * </ul>
 * Playback uses either the Agora SDK ({@code mLiveSDK == true}, through
 * {@link SWAuManager}) or a Tencent {@code TXLivePlayer} stream.
 *
 * NOTE(review): {@link #getInstance()} is not thread-safe (unsynchronized
 * lazy init); presumably only ever called from the main thread — confirm.
 */
public class LiveFloatView implements Function1<FloatCallbacks.Builder, Unit> {

    // Lazily created singleton instance.
    private static LiveFloatView instance;
    private LiveOnInvokeView liveOnInvokeView = null;
    private Activity mContext;
    // Playback URL for the TX player path; unused on the Agora path.
    private String url;
    private LiveBean mLiveBean;
    private int mLiveType;
    // true = play through the Agora SDK, false = play through TXLivePlayer.
    private boolean mLiveSDK;
    private int mLiveTypeVal;
    private FloatCallbacks.Builder builder;
    // Also used as the EasyFloat tag for the system floating window.
    private String TAG = "LiveFloatView";
    private TXLivePlayer mPlayer;
    private FrameLayout videoFrameLayout;
    private TXCloudVideoView videoView;
    private SWAuManager swAuManager;

    /** Returns the singleton, creating it on first use (not thread-safe). */
    public static LiveFloatView getInstance() {
        if (instance == null) {
            instance = new LiveFloatView();
        }
        return instance;
    }

    /**
     * Shows the in-app floating window and starts playback.
     *
     * @param mContext host activity
     * @param url      stream URL; a blank URL aborts silently
     * @param back     activity class excluded from the floating window ("black list")
     */
    public void builderFloat(Activity mContext, String url, Class<?> back) {
        this.mContext = mContext;
        this.url = url;
        if (TextUtils.isEmpty(url)) return;
        APPEasyFloat.getInstance().layout(R.layout.view_flaot_live)
                .dragEnable(true)
                .setAutoMoveToEdge(true)
                .black(back)
                .setMagnetViewListener(new MagnetViewListener() {
                    @Override
                    public void invoke(FloatingMagnetView magnetView) {
                        CardView layout = magnetView.findViewById(R.id.layout);
                        RelativeLayout.LayoutParams cardParams = (RelativeLayout.LayoutParams) layout.getLayoutParams();
                        // Landscape rooms get a wide card, portrait rooms a tall one.
                        if (mLiveBean.getLandscape() == 1) {
                            cardParams.height = DpUtil.dp2px(190);
                            cardParams.width = DpUtil.dp2px(255);
                        } else {
                            cardParams.height = DpUtil.dp2px(224);
                            cardParams.width = DpUtil.dp2px(126);
                        }
                        if(mLiveSDK){
                            // Agora path: attach the remote video to the float's container.
                            L.eSw("mLiveSDK == Constants.LIVE_S2222222222222222");
                            videoFrameLayout = magnetView.findViewById(R.id.videoFrameLayout);
                            swAuManager = SWAuManager.get();
                            swAuManager.setAudienceContainer(videoFrameLayout);
                            swAuManager.initRtcEngine( mContext);
                            swAuManager.setupRemoteVideo(Integer.parseInt(mLiveBean.getUid()));
                            // Enter the broadcaster's room.
                            swAuManager.joinRoom(CommonAppConfig.getInstance().getUid(), CommonAppConfig.SWToken,mLiveBean.getUid(), SWAuManager.getChannelName(mLiveBean.getUid()));
                        }else{
                            // TX path: start FLV playback inside the float's video view.
                            videoView = magnetView.findViewById(R.id.video_view);
                            mPlayer = new TXLivePlayer(mContext);
                            mPlayer.setPlayerView(videoView);
                            mPlayer.startPlay(url, TXLivePlayer.PLAY_TYPE_LIVE_FLV);
                            mPlayer.setPlayListener(new ITXLivePlayListener() {
                                @Override
                                public void onPlayEvent(int event, Bundle param) {
                                    // Resize the card once the actual stream dimensions arrive.
                                    float videoWidth = param.getInt("EVT_PARAM1", 0);
                                    float videoHeight = param.getInt("EVT_PARAM2", 0);
                                    Log.e("视频流有", "videoWidth:" + videoWidth + " videoHeight:" + videoHeight);
                                    if (mLiveBean.getLandscape() != 1){
                                        if (videoWidth > 720) {
                                            cardParams.height = DpUtil.dp2px(190);
                                            cardParams.width = DpUtil.dp2px(255);
                                        } else {
                                            cardParams.height = DpUtil.dp2px(224);
                                            cardParams.width = DpUtil.dp2px(126);
                                        }
                                        layout.setLayoutParams(cardParams);
                                    }
                                }

                                @Override
                                public void onNetStatus(Bundle bundle) {

                                }
                            });
                        }

                        // Close button: stop playback, dismiss, optionally show a hint dialog.
                        // NOTE(review): on the TX path mPlayer could still be null here if
                        // setup failed — confirm against the layout/flow.
                        ViewClicksAntiShake.clicksAntiShake(magnetView.findViewById(R.id.btn_close), new ViewClicksAntiShake.ViewClicksCallBack() {
                            @Override
                            public void onViewClicks() {
                                if(mLiveSDK){
                                    SWAuManager.get().exitChannelAll();
                                }else{
                                    mPlayer.stopPlay(true);
                                }
                                APPEasyFloat.getInstance().dismiss(mContext);
                                if (IMLoginManager.get(mContext).isHint2() && !((FragmentActivity) mContext).getSupportFragmentManager().isDestroyed()) {
                                    HintDialog fragment = new HintDialog();
                                    Bundle args = new Bundle();
                                    args.putString("close", "1");
                                    fragment.setArguments(args);
                                    fragment.show(((FragmentActivity) mContext).getSupportFragmentManager(), "HintDialog");
                                }
                            }
                        });
                        // Tapping the video itself returns to the live room
                        // (after re-checking the live status via liveCheck).
                        if(mLiveSDK){
                            ViewClicksAntiShake.clicksAntiShake(videoFrameLayout, () -> {
                                SWAuManager.get().exitChannelAll();
                                APPEasyFloat.getInstance().dismiss(mContext);
                                new Handler().post(liveCheck);
                            });
                        }else{
                            ViewClicksAntiShake.clicksAntiShake(videoView, () -> {
                                mPlayer.stopPlay(true);
                                APPEasyFloat.getInstance().dismiss(mContext);
                                new Handler().post(liveCheck);
                            });
                        }

                    }

                    @Override
                    public void onRemove() {
                        //
                    }

                    @Override
                    public void dismiss() {
                        // Stop whichever playback path is active when the float goes away.
                        if(mLiveSDK){
                            SWAuManager.get().exitChannelAll();
                        }else{
                            if (mPlayer != null)
                                mPlayer.stopPlay(true);
                        }

                    }
                })
                .show(mContext);

    }

    /**
     * Shows the system-overlay floating window (visible outside the app) and
     * starts playback via {@link LiveOnInvokeView}.
     */
    public void builderSystemFloat(Activity mContext, String url) {
        liveOnInvokeView = new LiveOnInvokeView();
        this.mContext = mContext;
        this.url = url;
        if (TextUtils.isEmpty(url)) return;
        EasyFloat.with(mContext)
                .setTag(TAG)
                .setLayout(R.layout.view_flaot_live, liveOnInvokeView)
                .setShowPattern(ShowPattern.ALL_TIME)
                .setFilter()
                .setGravity(Gravity.END | Gravity.CENTER_VERTICAL, 0, 200)
                .registerCallback(this)
                .show();
    }

    /**
     * Caches the live-room data used later by the float callbacks.
     *
     * NOTE(review): returns the static {@code instance}, which is null if this
     * is called before {@link #getInstance()} — confirm callers always go
     * through getInstance() first.
     */
    public LiveFloatView cacheLiveData(LiveBean mLiveBean, int mLiveType, boolean mLiveSDK, int mLiveTypeVal) {
        this.mLiveBean = mLiveBean;
        this.mLiveType = mLiveType;
        this.mLiveSDK = mLiveSDK;
        this.mLiveTypeVal = mLiveTypeVal;
        return instance;
    }

    /**
     * EasyFloat callback registration: wires create/dismiss/hide events to
     * player lifecycle and the optional {@link LiveFloatViewCallback}.
     */
    @Override
    public Unit invoke(FloatCallbacks.Builder builder) {
        this.builder = builder;
        builder.createResult((aBoolean, s, view) -> {
            Log.e("LiveFloatView", "aBoolean:" + aBoolean + "---------------------s:" + s);
            if (callback != null) {
                callback.invoke(aBoolean);
            }
            builder.dismiss(() -> {
                // Stop playback when the system float is dismissed.
                if (mPlayer != null && mPlayer.isPlaying()) {
                    mPlayer.stopPlay(true);
                }
                return null;
            });
            builder.hide(view12 -> {
                // Pause (don't stop) when the float is merely hidden.
                if (mPlayer != null && mPlayer.isPlaying()) {
                    mPlayer.pause();
                }
                return null;
            });

            return null;
        });
        return null;
    }

    /**
     * Layout binder for the system-overlay floating window: sizes the card,
     * starts playback (Agora or TX path), and wires the close/return actions.
     */
    private class LiveOnInvokeView implements OnInvokeView {

        @Override
        public void invoke(View view) {
            videoFrameLayout = view.findViewById(R.id.videoFrameLayout);
            L.eSw("invoke(View view) invoke(View view) invoke(View view) invoke(View view) ");
            CardView layout = view.findViewById(R.id.layout);
            RelativeLayout.LayoutParams cardParams = (RelativeLayout.LayoutParams) layout.getLayoutParams();
            // Landscape rooms get a wide card, portrait rooms a tall one.
            if (mLiveBean.getLandscape() == 1) {
                cardParams.height = DpUtil.dp2px(190);
                cardParams.width = DpUtil.dp2px(255);
            } else {
                cardParams.height = DpUtil.dp2px(224);
                cardParams.width = DpUtil.dp2px(126);
            }
            layout.setLayoutParams(cardParams);
            if(mLiveSDK){
                // Agora path: attach the remote video and join the room.
                L.eSw("mLiveSDK == Constants.LIVE_SDK_SW mLive1111111");
                swAuManager = SWAuManager.get();
                swAuManager.setAudienceContainer(videoFrameLayout);
                swAuManager.initRtcEngine( mContext);
                swAuManager.setupRemoteVideo(Integer.parseInt(mLiveBean.getUid()));
                // Enter the broadcaster's room.
                swAuManager.joinRoom(CommonAppConfig.getInstance().getUid(), CommonAppConfig.SWToken, mLiveBean.getUid(),SWAuManager.getChannelName(mLiveBean.getUid()));
            }else{
                // TX path: start FLV playback inside the float's video view.
                TXCloudVideoView videoView = view.findViewById(R.id.video_view);
                mPlayer = new TXLivePlayer(mContext);
                mPlayer.setPlayerView(videoView);
                mPlayer.startPlay(url, TXLivePlayer.PLAY_TYPE_LIVE_FLV);
                mPlayer.setPlayListener(new ITXLivePlayListener() {
                    @Override
                    public void onPlayEvent(int event, Bundle param) {
                        // Resize the card once the actual stream dimensions arrive.
                        float videoWidth = param.getInt("EVT_PARAM1", 0);
                        float videoHeight = param.getInt("EVT_PARAM2", 0);
                        Log.e("视频流有", "videoWidth:" + videoWidth + " videoHeight:" + videoHeight);
                        if (mLiveBean.getLandscape() != 1){
                            if (videoWidth > 720) {
                                cardParams.height = DpUtil.dp2px(190);
                                cardParams.width = DpUtil.dp2px(255);
                            } else {
                                cardParams.height = DpUtil.dp2px(224);
                                cardParams.width = DpUtil.dp2px(126);
                            }
                            layout.setLayoutParams(cardParams);
                        }

                    }

                    @Override
                    public void onNetStatus(Bundle bundle) {

                    }
                });
            }

            // Close button: stop playback, dismiss the overlay, optionally show a hint.
            ViewClicksAntiShake.clicksAntiShake(view.findViewById(R.id.btn_close), new ViewClicksAntiShake.ViewClicksCallBack() {
                @Override
                public void onViewClicks() {
                    if(mLiveSDK){
                        SWAuManager.get().exitChannelAll();
                    }else{
                        mPlayer.stopPlay(true);
                    }

                    EasyFloat.dismiss("LiveFloatView", true);
                    if (IMLoginManager.get(mContext).isHint2() && !((FragmentActivity) mContext).getSupportFragmentManager().isDestroyed()) {
                        HintDialog fragment = new HintDialog();
                        Bundle args = new Bundle();
                        args.putString("close", "1");
                        fragment.setArguments(args);
                        fragment.show(((FragmentActivity) mContext).getSupportFragmentManager(), "HintDialog");
                    }
                }
            });
            // Tapping the float returns to the live room after a status re-check.
            ViewClicksAntiShake.clicksAntiShake(view, () -> {
                if(mLiveSDK){
                    SWAuManager.get().exitChannelAll();
                }else{
                    mPlayer.stopPlay(true);
                }
                new Handler().post(liveCheck);


            });
        }
    }

    private LiveFloatViewCallback callback;

    /** Registers a callback notified with the float-creation result. */
    public LiveFloatView setCallback(LiveFloatViewCallback callback) {
        this.callback = callback;
        return this;
    }

    /** Receives the result of the floating-window creation. */
    public interface LiveFloatViewCallback {
        void invoke(Boolean aBoolean);

    }

    // Re-checks whether the room is still live; if so, navigates back into the
    // audience activity, otherwise closes whichever floating window is showing.
    private Runnable liveCheck = new Runnable() {
        @Override
        public void run() {
            HttpClient.getInstance().get("Live.checkLive", "")
                    .params("liveuid", mLiveBean.getUid())
                    .params("stream", mLiveBean.getStream())
                    .execute(new HttpCallback() {
                        @Override
                        public void onSuccess(int code, String msg, String[] info) {
                            if (code == 0) {

                                RouteUtil.forwardLiveAudienceActivity(mLiveBean, mLiveType, mLiveSDK ? Constants.LIVE_SDK_SW:Constants.LIVE_SDK_TX, mLiveTypeVal,mLiveSDK);

                            } else {
                                // If a live floating window is showing, close it directly.
                                if (EasyFloat.isShow("LiveFloatView")) {
                                    EasyFloat.dismiss("LiveFloatView", true);
                                } else {
                                    APPEasyFloat.getInstance().dismiss(mContext);
                                }
                            }
                        }
                    });
        }
    };

}
|
||||
@@ -0,0 +1,429 @@
|
||||
package io.agora.beautyapi.faceunity.agora;
|
||||
|
||||
import android.app.Activity;
|
||||
import android.content.Context;
|
||||
import android.util.Log;
|
||||
import android.view.SurfaceView;
|
||||
import android.widget.FrameLayout;
|
||||
|
||||
|
||||
import com.yunbao.common.CommonAppConfig;
|
||||
import com.yunbao.common.CommonAppContext;
|
||||
import com.yunbao.common.bean.BaseModel;
|
||||
import com.yunbao.common.bean.LiveBean;
|
||||
import com.yunbao.common.bean.UserBean;
|
||||
import com.yunbao.common.http.API;
|
||||
import com.yunbao.common.http.ResponseModel;
|
||||
import com.yunbao.common.manager.MicUserManager;
|
||||
import com.yunbao.common.manager.MicedUserManager;
|
||||
import com.yunbao.common.manager.base.BaseCacheManager;
|
||||
import com.yunbao.common.manager.imrongcloud.RongcloudIMManager;
|
||||
import com.yunbao.common.utils.L;
|
||||
import com.yunbao.common.utils.StringUtil;
|
||||
import com.yunbao.common.utils.ToastUtil;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import io.agora.rtc2.ChannelMediaOptions;
|
||||
import io.agora.rtc2.Constants;
|
||||
import io.agora.rtc2.IRtcEngineEventHandler;
|
||||
import io.agora.rtc2.RtcConnection;
|
||||
import io.agora.rtc2.RtcEngineConfig;
|
||||
import io.agora.rtc2.RtcEngineEx;
|
||||
import io.agora.rtc2.video.VideoCanvas;
|
||||
import io.reactivex.android.schedulers.AndroidSchedulers;
|
||||
import io.reactivex.functions.Consumer;
|
||||
import io.reactivex.schedulers.Schedulers;
|
||||
import io.rong.imkit.IMCenter;
|
||||
import io.rong.imlib.IRongCallback;
|
||||
import io.rong.imlib.RongIMClient;
|
||||
import io.rong.imlib.model.Conversation;
|
||||
import io.rong.imlib.model.Message;
|
||||
import io.rong.message.TextMessage;
|
||||
|
||||
/**
|
||||
* 声网主播管理类
|
||||
*/
|
||||
public class SWAuManager extends BaseCacheManager {
|
||||
|
||||
private Activity mContext;
|
||||
public static SWAuManager manager;
|
||||
private RtcEngineEx mRtcEngine;
|
||||
private FrameLayout audienceContainer; //主播视图
|
||||
private FrameLayout pkContainer1; //pk主播视图1
|
||||
private FrameLayout pkContainer2; //pk主播视图2
|
||||
private FrameLayout pkContainer3; //pk主播视图3
|
||||
private FrameLayout linkUserContainer;//连麦用户视图
|
||||
|
||||
private int liveMicUid;
|
||||
|
||||
private SwListener swListener;
|
||||
|
||||
public void setSwListener(SwListener swListener) {
|
||||
this.swListener = swListener;
|
||||
}
|
||||
|
||||
public SWAuManager(Context context) {
|
||||
super(context);
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取单列
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public static SWAuManager get() {
|
||||
if (null == manager) {
|
||||
manager = new SWAuManager(CommonAppContext.sInstance.getBaseContext());
|
||||
}
|
||||
return manager;
|
||||
}
|
||||
|
||||
IRtcEngineEventHandler mEventHandler = new IRtcEngineEventHandler() {
|
||||
@Override
|
||||
public void onJoinChannelSuccess(String channel, int uid, int elapsed) {
|
||||
super.onJoinChannelSuccess(channel, uid, elapsed);
|
||||
L.eSw("加入房间成功 channel" + channel + " uid" + uid);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onUserJoined(int uid, int elapsed) {
|
||||
super.onUserJoined(uid, elapsed);
|
||||
L.eSw("用户加入房间成功" + uid);
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* 初始化声网SDK
|
||||
*/
|
||||
public void initRtcEngine(Activity mContext) {
|
||||
this.mContext = mContext;
|
||||
if(mRtcEngine==null){
|
||||
try {
|
||||
// 创建 RtcEngineConfig 对象,并进行配置
|
||||
RtcEngineConfig config = new RtcEngineConfig();
|
||||
config.mContext = mContext;
|
||||
config.mAppId = CommonAppConfig.getSwAppId();
|
||||
config.mEventHandler = mRtcEventHandler;
|
||||
// 创建并初始化 RtcEngine
|
||||
mRtcEngine = (RtcEngineEx) RtcEngineEx.create(config);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("Check the error.");
|
||||
}
|
||||
}
|
||||
mRtcEngine.setChannelProfile(Constants.CHANNEL_PROFILE_LIVE_BROADCASTING); // 直播模式,引擎級別
|
||||
// 开启本地预览
|
||||
//mRtcEngine.startPreview();
|
||||
}
|
||||
|
||||
/**
|
||||
* 初始化声网SDK-语音房
|
||||
*/
|
||||
public void initRtcEngineAudio(Activity mContext) {
|
||||
this.mContext = mContext;
|
||||
try {
|
||||
if(mRtcEngine==null){
|
||||
RtcEngineConfig config = new RtcEngineConfig();
|
||||
config.mContext = mContext;
|
||||
config.mAppId = CommonAppConfig.getSwAppId();
|
||||
config.mEventHandler = mEventHandler;
|
||||
// 创建并初始化 RtcEngine
|
||||
mRtcEngine = (RtcEngineEx) RtcEngineEx.create(config);
|
||||
}
|
||||
// 创建 RtcEngineConfig 对象,并进行配置
|
||||
mRtcEngine.enableAudio();
|
||||
mRtcEngine.enableLocalAudio(true);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("Check the error.");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建语音房
|
||||
*
|
||||
* @param token
|
||||
* @param channelName
|
||||
* @param uid
|
||||
*/
|
||||
public void joinAudioRoom(String channelName, String token, int uid) {
|
||||
L.eSw("加入房间" + channelName + " uid" + uid);
|
||||
// 直播场景下,设置频道场景为 BROADCASTING (直播场景)
|
||||
mRtcEngine.setChannelProfile(Constants.CHANNEL_PROFILE_LIVE_BROADCASTING);
|
||||
// 创建 ChannelMediaOptions 对象,并进行配置
|
||||
ChannelMediaOptions options = new ChannelMediaOptions();
|
||||
// 根据场景将用户角色设置为 AUDIENCE (观众)
|
||||
options.clientRoleType = Constants.CLIENT_ROLE_AUDIENCE;
|
||||
mRtcEngine.disableVideo();
|
||||
options.autoSubscribeAudio = true;
|
||||
mRtcEngine.joinChannel(token, channelName, uid, options);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* 更新主播视图
|
||||
*
|
||||
* @param frameLayout
|
||||
*/
|
||||
public void updateMyChannelView(FrameLayout frameLayout, int uid) {
|
||||
mRtcEngine.setupLocalVideo(null);
|
||||
SurfaceView surfaceView = new SurfaceView(mContext);
|
||||
surfaceView.setZOrderMediaOverlay(true);
|
||||
frameLayout.addView(surfaceView);
|
||||
|
||||
VideoCanvas videoCanvas = new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid);
|
||||
mRtcEngine.setupRemoteVideo(videoCanvas);
|
||||
}
|
||||
|
||||
/**
|
||||
* 加入房间
|
||||
*/
|
||||
public void joinRoom(String strUid, String token, String liveUid, String channelName) {
|
||||
liveMicUid = Integer.parseInt(liveUid);
|
||||
int uid;
|
||||
if (StringUtil.isEmpty(strUid)) {
|
||||
uid = 0;
|
||||
} else {
|
||||
uid = Integer.parseInt(strUid);
|
||||
}
|
||||
// 创建一个 SurfaceView 对象,并将其作为 FrameLayout 的子对象
|
||||
SurfaceView surfaceView = new SurfaceView(mContext);
|
||||
audienceContainer.addView(surfaceView);
|
||||
|
||||
// 启用视频模块
|
||||
mRtcEngine.enableVideo();
|
||||
|
||||
//走媒体音量,和音质有关
|
||||
mRtcEngine.setAudioProfile(Constants.AUDIO_PROFILE_MUSIC_HIGH_QUALITY);
|
||||
mRtcEngine.setAudioScenario(Constants.AUDIO_SCENARIO_GAME_STREAMING);
|
||||
|
||||
// 创建 ChannelMediaOptions 对象,并进行配置
|
||||
ChannelMediaOptions options = new ChannelMediaOptions();
|
||||
// 根据场景将用户角色设置为 AUDIENCE (观众)
|
||||
options.clientRoleType = Constants.CLIENT_ROLE_AUDIENCE;
|
||||
// 直播场景下,设置频道场景为 BROADCASTING (直播场景)
|
||||
options.audienceLatencyLevel = Constants.AUDIENCE_LATENCY_LEVEL_LOW_LATENCY; // 觀眾走極速直播
|
||||
// 使用临时 Token 加入频道,自行指定用户 ID 并确保其在频道内的唯一性
|
||||
mRtcEngine.joinChannel(token, channelName, uid, options);
|
||||
}
|
||||
|
||||
public void joinExRoomEx(int strUid, String token, String channelName) {
|
||||
// 创建 ChannelMediaOptions 对象,并进行配置
|
||||
ChannelMediaOptions options = new ChannelMediaOptions();
|
||||
// 根据场景将用户角色设置为 AUDIENCE (观众)
|
||||
options.clientRoleType = Constants.CLIENT_ROLE_AUDIENCE;
|
||||
// 直播场景下,设置频道场景为 BROADCASTING (直播场景)
|
||||
options.audienceLatencyLevel = Constants.AUDIENCE_LATENCY_LEVEL_LOW_LATENCY; // 觀眾走極速直播
|
||||
options.autoSubscribeVideo = true;
|
||||
options.autoSubscribeAudio = true;
|
||||
|
||||
RtcConnection rtcConnection = new RtcConnection();
|
||||
rtcConnection.channelId = channelName; //對方主播的頻道
|
||||
rtcConnection.localUid = Integer.parseInt(CommonAppConfig.getInstance().getUid());//自己的ID
|
||||
|
||||
mRtcEngine.joinChannelEx(token, rtcConnection, options, new IRtcEngineEventHandler() {
|
||||
@Override
|
||||
public void onJoinChannelSuccess(String channel, int uid, int elapsed) {
|
||||
super.onJoinChannelSuccess(channel, uid, elapsed);
|
||||
mContext.runOnUiThread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
L.eSw("onJoinChannelSuccess:" + channel + " uid " + uid + " elapsed: " + elapsed);
|
||||
SurfaceView surfaceView = new SurfaceView(mContext);
|
||||
surfaceView.setZOrderMediaOverlay(true);
|
||||
pkContainer1.addView(surfaceView);
|
||||
VideoCanvas videoCanvas = new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, strUid);
|
||||
mRtcEngine.setupRemoteVideoEx(videoCanvas, rtcConnection);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(int err) {
|
||||
super.onError(err);
|
||||
L.eSw("onError:" + err);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 与主播连麦
|
||||
*
|
||||
* @param isUp true 上麦 false 下麦
|
||||
*/
|
||||
public void setMicAn(boolean isUp) {
|
||||
L.eSw("setMicAn-isUp:" + isUp);
|
||||
ChannelMediaOptions options = new ChannelMediaOptions();
|
||||
if (isUp) {
|
||||
options.publishCameraTrack = false; //控制是否发视频
|
||||
options.publishMicrophoneTrack = true; //控制是否发音频
|
||||
options.clientRoleType = Constants.CLIENT_ROLE_BROADCASTER; //设置身份为主播
|
||||
} else {
|
||||
options.publishCameraTrack = false; //控制是否发视频
|
||||
options.publishMicrophoneTrack = false; //控制是否发音频
|
||||
options.clientRoleType = Constants.CLIENT_ROLE_AUDIENCE; //设置身份为观众
|
||||
options.audienceLatencyLevel = Constants.AUDIENCE_LATENCY_LEVEL_LOW_LATENCY;//设置为极速直播观众
|
||||
}
|
||||
mRtcEngine.updateChannelMediaOptions(options);
|
||||
}
|
||||
|
||||
public void setMicAudio(int toUid, boolean isEn) {
|
||||
L.eSw("setMicAudio:toUid" + toUid + "isEn " + isEn);
|
||||
mRtcEngine.muteRemoteAudioStream(toUid, isEn);
|
||||
}
|
||||
|
||||
/**
|
||||
* PK-加入对方主播直播间
|
||||
*
|
||||
* @param strUid
|
||||
* @param token
|
||||
* @param toUid
|
||||
* @param channelName
|
||||
*/
|
||||
public void joinChannelDrEx(FrameLayout frameLayout, String strUid, String token, String toUid, String channelName) {
|
||||
int tempUid;
|
||||
if (StringUtil.isEmpty(strUid)) {
|
||||
tempUid = 0;
|
||||
} else {
|
||||
tempUid = Integer.parseInt(strUid);
|
||||
}
|
||||
|
||||
// 创建 ChannelMediaOptions 对象,并进行配置
|
||||
ChannelMediaOptions options = new ChannelMediaOptions();
|
||||
// 根据场景将用户角色设置为 AUDIENCE (观众)
|
||||
options.clientRoleType = Constants.CLIENT_ROLE_AUDIENCE;
|
||||
options.autoSubscribeVideo = true;
|
||||
options.autoSubscribeAudio = true;
|
||||
options.publishMicrophoneTrack = false;
|
||||
options.publishCameraTrack = false;
|
||||
|
||||
RtcConnection rtcConnection = new RtcConnection();
|
||||
rtcConnection.channelId = channelName; //對方主播的頻道
|
||||
rtcConnection.localUid = tempUid;//自己的ID
|
||||
L.eSw("strUid:" + tempUid + "_token:" + token + "_channelName:" + channelName);
|
||||
mRtcEngine.joinChannelEx(token, rtcConnection, options, new IRtcEngineEventHandler() {
|
||||
@Override
|
||||
public void onJoinChannelSuccess(String channel, int scUid, int elapsed) {
|
||||
super.onJoinChannelSuccess(channel, scUid, elapsed);
|
||||
L.eSw("onJoinChannelSuccess:" + channel + " uid " + scUid + " elapsed: " + elapsed);
|
||||
mContext.runOnUiThread(new Runnable() {
|
||||
@Override
|
||||
public void run() {
|
||||
SurfaceView surfaceView = new SurfaceView(mContext);
|
||||
surfaceView.setZOrderMediaOverlay(true);
|
||||
frameLayout.addView(surfaceView);
|
||||
VideoCanvas videoCanvas = new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, Integer.parseInt(toUid));
|
||||
mRtcEngine.setupRemoteVideoEx(videoCanvas, rtcConnection);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onLeaveChannel(RtcStats stats) {
|
||||
super.onLeaveChannel(stats);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(int err) {
|
||||
super.onError(err);
|
||||
L.eSw("onError:" + err);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 退出对方直播间
|
||||
*
|
||||
* @param uid 自己的ID
|
||||
* @param liveUid 对方直播间号
|
||||
*/
|
||||
public void exitChannelToUid(int uid, String liveUid) {
|
||||
RtcConnection rtcConnection = new RtcConnection();
|
||||
rtcConnection.channelId = getChannelName(liveUid); //對方主播的頻道
|
||||
rtcConnection.localUid = uid;//自己的ID
|
||||
mRtcEngine.leaveChannelEx(rtcConnection);
|
||||
}
|
||||
|
||||
/**
|
||||
* 退出所有的直播间
|
||||
*/
|
||||
public void exitChannelAll() {
|
||||
L.eSw("exitChannelAll exitChannelAll exitChannelAll");
|
||||
if (mRtcEngine != null) {
|
||||
mRtcEngine.leaveChannel();
|
||||
}
|
||||
}
|
||||
|
||||
public static String getChannelName(String liveUid) {
|
||||
if ( liveUid != null &&liveUid.contains("g")) {
|
||||
return liveUid;
|
||||
} else {
|
||||
return CommonAppConfig.SWChannelPrefix + liveUid;
|
||||
}
|
||||
}
|
||||
|
||||
// Engine-level event handler for the primary channel (remote-user events).
private final IRtcEngineEventHandler mRtcEventHandler = new IRtcEngineEventHandler() {
    @Override
    // Fired when a remote user joins the channel; carries the remote uid.
    public void onUserJoined(int uid, int elapsed) {
        mContext.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                // Once the uid is known, the remote view could be attached here.
                //setupRemoteVideo(uid);
            }
        });
    }

    @Override
    public void onUserOffline(int uid, int reason) {
        super.onUserOffline(uid, reason);
        L.eSw("onUserOffline: uid " + uid + " reason" + reason);
        // Only forward the event when the user that left is the one on mic.
        if (liveMicUid == uid) {
            if (swListener != null) {
                swListener.onUserOffline(liveMicUid);
            }
        }
    }
};
|
||||
|
||||
// Attaches the remote broadcaster's video stream to the audience container.
public void setupRemoteVideo(int uid) {
    SurfaceView surfaceView = new SurfaceView(mContext);
    surfaceView.setZOrderMediaOverlay(true);
    audienceContainer.addView(surfaceView);
    // Hand the SurfaceView to the Agora SDK as the remote render target.
    mRtcEngine.setupRemoteVideo(new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid));
}
|
||||
|
||||
// Container setters: the hosting UI injects the FrameLayouts that video
// SurfaceViews are attached to.

/** Container for the remote broadcaster's view shown to the audience. */
public void setAudienceContainer(FrameLayout audienceContainer) {
    this.audienceContainer = audienceContainer;
}

/** Container for PK opponent slot 1. */
public void setPkContainer1(FrameLayout pkContainer1) {
    this.pkContainer1 = pkContainer1;
}

/** Container for PK opponent slot 2. */
public void setPkContainer2(FrameLayout pkContainer2) {
    this.pkContainer2 = pkContainer2;
}

/** Container for PK opponent slot 3. */
public void setPkContainer3(FrameLayout pkContainer3) {
    this.pkContainer3 = pkContainer3;
}

/** Container for the linked (co-streaming) user's view. */
public void setLinkUserContainer(FrameLayout linkUserContainer) {
    this.linkUserContainer = linkUserContainer;
}
|
||||
|
||||
/** Callback for remote-user lifecycle events relevant to the mic seat. */
public interface SwListener {
    /** Invoked when the watched (mic) user goes offline. */
    void onUserOffline(int liveUid);
}
|
||||
|
||||
public void preloadChannel(List<LiveBean> uids){
|
||||
for (int i = 0; i <uids.size(); i++) {
|
||||
int code = mRtcEngine.preloadChannel(CommonAppConfig.SWToken,getChannelName(uids.get(i).getUid()),Integer.parseInt(CommonAppConfig.getInstance().getUid()));
|
||||
L.eSw("设置秒开数据 uid"+uids.get(i).getUid()+" --- userName:"+uids.get(i).getUserNiceName()+" code "+code);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,529 @@
|
||||
package io.agora.beautyapi.faceunity.agora;
|
||||
|
||||
import android.app.Activity;
|
||||
import android.content.Context;
|
||||
import android.view.SurfaceView;
|
||||
import android.widget.FrameLayout;
|
||||
|
||||
import com.yunbao.common.CommonAppConfig;
|
||||
import com.yunbao.common.CommonAppContext;
|
||||
import com.yunbao.common.bean.SwTokenModel;
|
||||
import com.yunbao.common.http.base.HttpCallback;
|
||||
import com.yunbao.common.http.live.LiveNetManager;
|
||||
import com.yunbao.common.manager.IMLoginManager;
|
||||
import com.yunbao.common.manager.base.BaseCacheManager;
|
||||
import com.yunbao.common.utils.L;
|
||||
import com.yunbao.common.utils.StringUtil;
|
||||
import com.yunbao.common.utils.ToastUtil;
|
||||
import com.yunbao.faceunity.utils.FURenderer;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import io.agora.beautyapi.faceunity.BeautyPreset;
|
||||
import io.agora.beautyapi.faceunity.CameraConfig;
|
||||
import io.agora.beautyapi.faceunity.CaptureMode;
|
||||
import io.agora.beautyapi.faceunity.Config;
|
||||
import io.agora.beautyapi.faceunity.FaceUnityBeautyAPI;
|
||||
import io.agora.beautyapi.faceunity.FaceUnityBeautyAPIKt;
|
||||
import io.agora.beautyapi.faceunity.MirrorMode;
|
||||
import io.agora.rtc2.ChannelMediaOptions;
|
||||
import io.agora.rtc2.Constants;
|
||||
import io.agora.rtc2.IRtcEngineEventHandler;
|
||||
import io.agora.rtc2.RtcConnection;
|
||||
import io.agora.rtc2.RtcEngineConfig;
|
||||
import io.agora.rtc2.RtcEngineEx;
|
||||
import io.agora.rtc2.video.ContentInspectConfig;
|
||||
import io.agora.rtc2.video.VideoCanvas;
|
||||
import io.agora.rtc2.video.VideoEncoderConfiguration;
|
||||
|
||||
/**
|
||||
* 声网主播管理类
|
||||
*/
|
||||
public class SWManager extends BaseCacheManager {
|
||||
|
||||
private Activity mContext;
|
||||
public static SWManager manager;
|
||||
private RtcEngineEx mRtcEngine;
|
||||
private final FaceUnityBeautyAPI faceUnityBeautyAPI = FaceUnityBeautyAPIKt.createFaceUnityBeautyAPI();
|
||||
private int uid;
|
||||
private VideoEncoderConfiguration cfg;
|
||||
private FrameLayout anchorContainer; //主播视图
|
||||
private FrameLayout pkContainer; //pk主播视图1
|
||||
private FrameLayout linkUserContainer;//连麦用户视图
|
||||
private onRtcEngineListener onRtcEngineListener;
|
||||
private List<String> drPkUserInfo = new ArrayList<>();
|
||||
|
||||
private onDrPkJoinSuccessListener onDrPkJoinSuccessListener;
|
||||
|
||||
/** Registers the multi-PK join-success callback. */
public void setOnDrPkJoinSuccessListener(SWManager.onDrPkJoinSuccessListener onDrPkJoinSuccessListener) {
    this.onDrPkJoinSuccessListener = onDrPkJoinSuccessListener;
}

/** Registers the engine lifecycle callback. */
public void setOnRtcEngineListener(SWManager.onRtcEngineListener onRtcEngineListener) {
    this.onRtcEngineListener = onRtcEngineListener;
}

/** Prefer {@link #get()}; this exists for the cache-manager base class. */
public SWManager(Context context) {
    super(context);
}
|
||||
|
||||
/**
 * Returns the process-wide singleton instance, creating it lazily.
 * NOTE(review): not thread-safe; assumed to be called from the main thread
 * only — confirm.
 *
 * @return the shared SWManager
 */
public static SWManager get() {
    if (null == manager) {
        manager = new SWManager(CommonAppContext.sInstance.getBaseContext());
    }
    return manager;
}
|
||||
|
||||
/**
|
||||
* 初始化声网SDK
|
||||
*/
|
||||
public void initRtcEngine(Activity mContext, String strUid) {
|
||||
this.mContext = mContext;
|
||||
if (StringUtil.isEmpty(strUid)) {
|
||||
uid = 0;
|
||||
} else {
|
||||
uid = Integer.parseInt(strUid);
|
||||
}
|
||||
try {
|
||||
if (mRtcEngine == null) {
|
||||
// 创建 RtcEngineConfig 对象,并进行配置
|
||||
RtcEngineConfig config = new RtcEngineConfig();
|
||||
config.mContext = mContext;
|
||||
config.mAppId = CommonAppConfig.getSwAppId();
|
||||
config.mEventHandler = mRtcEventHandler;
|
||||
// 创建并初始化 RtcEngine
|
||||
mRtcEngine = (RtcEngineEx) RtcEngineEx.create(config);
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("Check the error.");
|
||||
}
|
||||
mRtcEngine.setChannelProfile(Constants.CHANNEL_PROFILE_LIVE_BROADCASTING);
|
||||
// 启用视频模块
|
||||
mRtcEngine.enableVideo();
|
||||
// 开启本地预览
|
||||
mRtcEngine.startPreview();
|
||||
|
||||
cfg = new VideoEncoderConfiguration();
|
||||
|
||||
//设置默认分辨率
|
||||
VideoEncoderConfiguration.VideoDimensions videoDimensions = new VideoEncoderConfiguration.VideoDimensions();
|
||||
videoDimensions.width = 720;
|
||||
videoDimensions.height = 1280;
|
||||
cfg.dimensions = videoDimensions;
|
||||
|
||||
cfg.frameRate = 20;
|
||||
cfg.bitrate = 0;
|
||||
//镜像
|
||||
cfg.mirrorMode = VideoEncoderConfiguration.MIRROR_MODE_TYPE.MIRROR_MODE_ENABLED;
|
||||
//设置竖屏
|
||||
cfg.orientationMode = VideoEncoderConfiguration.ORIENTATION_MODE.ORIENTATION_MODE_FIXED_PORTRAIT;
|
||||
|
||||
|
||||
|
||||
mRtcEngine.setVideoEncoderConfiguration(cfg);
|
||||
|
||||
// 创建一个 SurfaceView 对象,并将其作为 FrameLayout 的子对象
|
||||
SurfaceView surfaceView = new SurfaceView(mContext);
|
||||
anchorContainer.addView(surfaceView);
|
||||
Config config = new Config(mContext, mRtcEngine, FURenderer.INSTANCE.mFURenderKit, null, CaptureMode.Agora, 0, false, new CameraConfig(MirrorMode.MIRROR_NONE, MirrorMode.MIRROR_NONE));
|
||||
faceUnityBeautyAPI.initialize(config);
|
||||
|
||||
faceUnityBeautyAPI.setBeautyPreset(BeautyPreset.CUSTOM);
|
||||
//FaceUnityBeautyManage.getInstance().mFURenderKit.setFaceBeauty();
|
||||
|
||||
// 设置视图
|
||||
faceUnityBeautyAPI.setupLocalVideo(surfaceView, Constants.RENDER_MODE_HIDDEN);
|
||||
//faceUnityBeautyAPI.updateCameraConfig(new CameraConfig(MirrorMode.MIRROR_NONE,MirrorMode.MIRROR_NONE));
|
||||
//mRtcEngine.setupLocalVideo(new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid));
|
||||
setEnableBeauty(true); //默认开启美颜
|
||||
setContentInspect(); //开启视频截图上传
|
||||
}
|
||||
|
||||
|
||||
/** Toggles the FaceUnity beauty pipeline on or off. */
public void setEnableBeauty(boolean flag) {
    if (faceUnityBeautyAPI != null) {
        faceUnityBeautyAPI.enable(flag);
    }
}
|
||||
|
||||
/**
 * Adjusts the encoder resolution for the number of broadcasters on screen:
 * single broadcaster: 720x1280; 2- or 3-way PK: 360x640 (in the 3-way layout
 * the left broadcaster is 360x640 and the right two are 360x320); 4-way PK:
 * 360x320.
 *
 * @param selectClarity number of broadcasters (1-4); any other value leaves
 *                      the current resolution unchanged
 */
public void setDrPkNumClarity(int selectClarity) {
    if (cfg != null && mRtcEngine != null) {
        VideoEncoderConfiguration.VideoDimensions videoDimensions = new VideoEncoderConfiguration.VideoDimensions();
        switch (selectClarity) {
            case 1:
                videoDimensions.width = 720;
                videoDimensions.height = 1280;
                cfg.dimensions = videoDimensions;
                break;
            case 2:
            case 3:
                videoDimensions.width = 360;
                videoDimensions.height = 640;
                cfg.dimensions = videoDimensions;
                break;
            case 4:
                videoDimensions.width = 360;
                videoDimensions.height = 320;
                cfg.dimensions = videoDimensions;
                break;
        }
        mRtcEngine.setVideoEncoderConfiguration(cfg);
    }
}
|
||||
|
||||
|
||||
/**
 * Toggles the encoder mirror mode, then re-attaches the local preview so the
 * change takes effect immediately.
 */
public void setMirrorMode() {
    if (cfg != null && mRtcEngine != null) {
        L.eSw("setMirrorMode设置镜像" + cfg.mirrorMode);
        if (cfg.mirrorMode == VideoEncoderConfiguration.MIRROR_MODE_TYPE.MIRROR_MODE_ENABLED) {
            cfg.mirrorMode = VideoEncoderConfiguration.MIRROR_MODE_TYPE.MIRROR_MODE_DISABLED; // mirror off
        } else {
            cfg.mirrorMode = VideoEncoderConfiguration.MIRROR_MODE_TYPE.MIRROR_MODE_ENABLED; // mirror on
        }
        mRtcEngine.setVideoEncoderConfiguration(cfg);
        SurfaceView surfaceView = new SurfaceView(mContext);
        anchorContainer.addView(surfaceView);
        mRtcEngine.setupLocalVideo(new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid));
    }
}
|
||||
|
||||
public void setContentInspect() {
|
||||
ContentInspectConfig config = new ContentInspectConfig();
|
||||
config.extraInfo = "YourExtraInfo";
|
||||
config.moduleCount = 1;
|
||||
// 功能模块的类型为视频截图上传
|
||||
config.modules[0].type = ContentInspectConfig.CONTENT_INSPECT_TYPE_SUPERVISE;
|
||||
// 视频截图上传的频率为 2 秒一次
|
||||
config.modules[0].interval = 2;
|
||||
mRtcEngine.enableContentInspect(true, config);
|
||||
}
|
||||
|
||||
|
||||
/** Switches between the front and rear camera. */
public void switchCamera() {
    if (mRtcEngine != null) {
        mRtcEngine.switchCamera();
    }
}
|
||||
|
||||
/**
 * Joins our own channel as the BROADCASTER, publishing camera and microphone.
 *
 * @param token       Agora token for the channel
 * @param channelName channel name to create/join
 */
public void createChannel(String token, String channelName) {
    ChannelMediaOptions options = new ChannelMediaOptions();
    // Role: BROADCASTER (host) rather than AUDIENCE.
    options.clientRoleType = Constants.CLIENT_ROLE_BROADCASTER;
    options.publishCameraTrack = true;
    options.publishMicrophoneTrack = true;
    mRtcEngine.setAudioProfile(Constants.AUDIO_PROFILE_MUSIC_HIGH_QUALITY);
    mRtcEngine.setAudioScenario(Constants.AUDIO_SCENARIO_GAME_STREAMING);

    // Join the channel.
    mRtcEngine.joinChannel(token, channelName, uid, options);
}
|
||||
|
||||
/**
 * Re-attaches the local preview to a new container (e.g. after the layout
 * changes when a PK session starts or ends).
 *
 * @param frameLayout new parent for the local-preview SurfaceView
 */
public void updateMyChannelView(FrameLayout frameLayout) {
    mRtcEngine.setupLocalVideo(null); // detach the previous render target first
    SurfaceView surfaceView = new SurfaceView(mContext);
    surfaceView.setZOrderMediaOverlay(true);
    frameLayout.addView(surfaceView);

    VideoCanvas videoCanvas = new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, uid);
    mRtcEngine.setupLocalVideo(videoCanvas);
}
|
||||
|
||||
|
||||
/**
 * PK: joins the opposing broadcaster's channel as a silent AUDIENCE member
 * and renders their video into {@code pkContainer} once the join succeeds.
 *
 * @param strUid      our own uid as a string (empty → 0)
 * @param token       token for the opposing channel
 * @param toUid       the opposing broadcaster's uid (numeric string)
 * @param channelName the opposing broadcaster's channel name
 */
public void joinChannelEx(String strUid, String token, String toUid, String channelName) {
    int tempUid;
    if (StringUtil.isEmpty(strUid)) {
        tempUid = 0;
    } else {
        tempUid = Integer.parseInt(strUid);
    }

    // Media options: subscribe to audio/video, publish nothing.
    ChannelMediaOptions options = new ChannelMediaOptions();
    // Join as AUDIENCE for this scenario.
    options.clientRoleType = Constants.CLIENT_ROLE_AUDIENCE;
    options.autoSubscribeVideo = true;
    options.autoSubscribeAudio = true;
    options.publishMicrophoneTrack = false;
    options.publishCameraTrack = false;

    RtcConnection rtcConnection = new RtcConnection();
    rtcConnection.channelId = channelName; // the opposing broadcaster's channel
    rtcConnection.localUid = tempUid; // our own uid
    L.eSw("strUid:" + tempUid + "_token:" + token + "_channelName:" + channelName);
    mRtcEngine.joinChannelEx(token, rtcConnection, options, new IRtcEngineEventHandler() {
        @Override
        public void onJoinChannelSuccess(String channel, int scUid, int elapsed) {
            super.onJoinChannelSuccess(channel, scUid, elapsed);
            L.eSw("onJoinChannelSuccess:" + channel + " uid " + scUid + " elapsed: " + elapsed);
            // Attach the remote view on the UI thread once joined.
            mContext.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    SurfaceView surfaceView = new SurfaceView(mContext);
                    surfaceView.setZOrderMediaOverlay(true);
                    pkContainer.addView(surfaceView);
                    VideoCanvas videoCanvas = new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, Integer.parseInt(toUid));
                    mRtcEngine.setupRemoteVideoEx(videoCanvas, rtcConnection);
                }
            });
        }

        @Override
        public void onLeaveChannel(RtcStats stats) {
            super.onLeaveChannel(stats);
        }

        @Override
        public void onError(int err) {
            super.onError(err);
            L.eSw("onError:" + err);
        }

        @Override
        public void onVideoSizeChanged(Constants.VideoSourceType source, int uid, int width, int height, int rotation) {
            super.onVideoSizeChanged(source, uid, width, height, rotation);
            L.eSw("uid:" + uid + "\nwidth:" + width + "\nheight:" + height);
        }
    });
}
|
||||
|
||||
|
||||
/**
 * Multi-way PK: joins one opposing broadcaster's channel as a silent
 * AUDIENCE member and renders their video into the given container.
 *
 * @param frameLayout container for this opponent's video
 * @param strUid      our own uid as a string (empty → 0)
 * @param token       token for the opposing channel
 * @param toUid       the opposing broadcaster's uid (numeric string)
 * @param channelName the opposing broadcaster's channel name
 * @param position    slot index of this opponent (unused in this method)
 */
public void joinChannelDrEx(FrameLayout frameLayout, String strUid, String token, String toUid, String channelName, int position) {
    int tempUid;
    if (StringUtil.isEmpty(strUid)) {
        tempUid = 0;
    } else {
        tempUid = Integer.parseInt(strUid);
    }

    // Media options: subscribe to audio/video, publish nothing.
    ChannelMediaOptions options = new ChannelMediaOptions();
    // Join as AUDIENCE for this scenario.
    options.clientRoleType = Constants.CLIENT_ROLE_AUDIENCE;
    options.autoSubscribeVideo = true;
    options.autoSubscribeAudio = true;
    options.publishMicrophoneTrack = false;
    options.publishCameraTrack = false;

    RtcConnection rtcConnection = new RtcConnection();
    rtcConnection.channelId = channelName; // the opposing broadcaster's channel
    rtcConnection.localUid = tempUid; // our own uid

    // Unlike joinChannelEx, the SurfaceView is attached up front and only the
    // canvas binding is deferred to join success.
    SurfaceView surfaceView = new SurfaceView(mContext);
    surfaceView.setZOrderMediaOverlay(true);
    frameLayout.addView(surfaceView);
    VideoCanvas videoCanvas = new VideoCanvas(surfaceView, VideoCanvas.RENDER_MODE_HIDDEN, Integer.parseInt(toUid));

    L.eSw("strUid:" + tempUid + "_token:" + token + "_channelName:" + channelName);
    // An internal notification refreshes the multi-PK user info.
    mRtcEngine.joinChannelEx(token, rtcConnection, options, new IRtcEngineEventHandler() {
        @Override
        public void onJoinChannelSuccess(String channel, int scUid, int elapsed) {
            super.onJoinChannelSuccess(channel, scUid, elapsed);
            L.eSw("onJoinChannelSuccess:" + channel + " uid " + scUid + " elapsed: " + elapsed);
            mContext.runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    mRtcEngine.setupRemoteVideoEx(videoCanvas, rtcConnection);
                }
            });
        }

        @Override
        public void onLeaveChannel(RtcStats stats) {
            super.onLeaveChannel(stats);
        }

        @Override
        public void onError(int err) {
            super.onError(err);
            L.eSw("onError:" + err);
        }
    });
}
|
||||
|
||||
/**
|
||||
* 退出对方直播间
|
||||
*
|
||||
* @param uid 自己的ID
|
||||
* @param liveUid 对方直播间号
|
||||
*/
|
||||
public void exitChannelToUid(int uid, String liveUid) {
|
||||
RtcConnection rtcConnection = new RtcConnection();
|
||||
rtcConnection.channelId = getChannelName(liveUid); //對方主播的頻道
|
||||
rtcConnection.localUid = uid;//自己的ID
|
||||
mRtcEngine.leaveChannelEx(rtcConnection);
|
||||
}
|
||||
|
||||
/**
 * Leaves the main channel (the one joined via joinChannel).
 */
public void exitChannelAll() {
    if (mRtcEngine != null) {
        mRtcEngine.leaveChannel();
    }
}
|
||||
|
||||
/** Container that hosts the local (anchor) preview SurfaceView. */
public void setAnchorContainer(FrameLayout anchorContainer) {
    this.anchorContainer = anchorContainer;
}
|
||||
|
||||
// Engine-level event handler for the primary channel.
private final IRtcEngineEventHandler mRtcEventHandler = new IRtcEngineEventHandler() {
    @Override
    // Fired when a remote user joins the channel; carries the remote uid.
    public void onUserJoined(int uid, int elapsed) {
        mContext.runOnUiThread(new Runnable() {
            @Override
            public void run() {
                // Once the uid is known, the remote view could be attached here.
                //setupRemoteVideo(uid);
            }
        });
    }

    @Override
    public void onJoinChannelSuccess(String channel, int uid, int elapsed) {
        super.onJoinChannelSuccess(channel, uid, elapsed);
        L.eSw("onJoinChannelSuccess 加入频道:channel" + channel + " uid:" + uid + " elapsed:" + elapsed);
        if (onRtcEngineListener != null) {
            onRtcEngineListener.onOpenSuccess(channel, uid);
        }
    }

    @Override
    public void onUserOffline(int uid, int reason) {
        super.onUserOffline(uid, reason);
        // NOTE(review): the branch below is currently a no-op — presumably
        // onRtcEngineListener.onUserOffline(uid) was intended; confirm.
        if (onRtcEngineListener != null) {
            //onRtcEngineListener.onOpenSuccess(channel, uid);
        }
    }

    @Override
    public void onError(int err) {
        super.onError(err);
        L.eSw("onError 错误码:" + err);
        ToastUtil.show("onError:" + err);
    }

    @Override
    public void onTokenPrivilegeWillExpire(String token) {
        super.onTokenPrivilegeWillExpire(token);
        L.eSw("onTokenPrivilegeWillExpire_Token 即将失效");
        // Token is about to expire: fetch and install a fresh one.
        refreshToken();
    }

    @Override
    public void onRequestToken() {
        super.onRequestToken();
        L.eSw("onRequestToken_Token失效");
        // Token has already expired: fetch and install a fresh one.
        refreshToken();
    }

    @Override
    public void onLeaveChannel(RtcStats stats) {
        super.onLeaveChannel(stats);
        L.eSw("onLeaveChannel退出頻道");
    }

    @Override
    public void onLocalVideoStateChanged(Constants.VideoSourceType source, int state, int error) {
        super.onLocalVideoStateChanged(source, state, error);
        L.eSw("onLocalVideoStateChanged_source" + source + " state_" + state + " error_" + error);
    }
};
|
||||
|
||||
private void refreshToken() {
|
||||
LiveNetManager.get(mContext).getSwToken(new HttpCallback<SwTokenModel>() {
|
||||
@Override
|
||||
public void onSuccess(SwTokenModel data) {
|
||||
if (mRtcEngine != null) {
|
||||
mRtcEngine.renewToken(data.getToken());
|
||||
}
|
||||
L.eSw("refreshToken_重新获取Token成功");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onError(String error) {
|
||||
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/** Lifecycle callbacks for the primary channel. */
public interface onRtcEngineListener {
    /** Called when our own join to the channel succeeds. */
    void onOpenSuccess(String channel, int uid);

    /** Called when a remote user leaves the channel. */
    void onUserOffline(int uid);
}
|
||||
|
||||
/** Container for the PK opponent's video view. */
public void setPkContainer(FrameLayout pkContainer) {
    this.pkContainer = pkContainer;
}

/** Container for the linked (co-streaming) user's video view. */
public void setLinkUserContainer(FrameLayout linkUserContainer) {
    this.linkUserContainer = linkUserContainer;
}
|
||||
|
||||
public static String getChannelName(String liveUid) {
|
||||
if (liveUid.contains("g")) {
|
||||
return liveUid;
|
||||
} else {
|
||||
return CommonAppConfig.SWChannelPrefix + liveUid;
|
||||
}
|
||||
}
|
||||
|
||||
/** Per-slot join-success callbacks for multi-way (up to 4) PK sessions. */
public interface onDrPkJoinSuccessListener {
    /** Opponent in slot 1 joined successfully. */
    void joinSuccess1();

    /** Opponent in slot 2 joined successfully. */
    void joinSuccess2();

    /** Opponent in slot 3 joined successfully. */
    void joinSuccess3();

    /** Opponent in slot 4 joined successfully. */
    void joinSuccess4();

}
|
||||
}
|
||||
@@ -0,0 +1,607 @@
|
||||
/*
|
||||
* MIT License
|
||||
*
|
||||
* Copyright (c) 2023 Agora Community
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
*
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*/
|
||||
|
||||
package io.agora.beautyapi.faceunity.utils;
|
||||
|
||||
import android.annotation.TargetApi;
|
||||
import android.app.ActivityManager;
|
||||
import android.content.Context;
|
||||
import android.os.Build;
|
||||
import android.text.TextUtils;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileNotFoundException;
|
||||
import java.io.FileReader;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
|
||||
public class FuDeviceUtils {
|
||||
|
||||
public static final String TAG = "FuDeviceUtils";
|
||||
|
||||
public static final int DEVICE_LEVEL_HIGH = 2;
|
||||
public static final int DEVICE_LEVEL_MID = 1;
|
||||
public static final int DEVICE_LEVEL_LOW = 0;
|
||||
|
||||
/**
|
||||
* The default return value of any method in this class when an
|
||||
* error occurs or when processing fails (Currently set to -1). Use this to check if
|
||||
* the information about the device in question was successfully obtained.
|
||||
*/
|
||||
public static final int DEVICEINFO_UNKNOWN = -1;
|
||||
|
||||
// Accepts sysfs entries named "cpu<digits>" (one per core).
// NOTE(review): the bare name "cpu" (no digits) also passes — confirm intended.
private static final FileFilter CPU_FILTER = new FileFilter() {
    @Override
    public boolean accept(File pathname) {
        String path = pathname.getName();
        //regex is slow, so checking char by char.
        if (path.startsWith("cpu")) {
            for (int i = 3; i < path.length(); i++) {
                if (!Character.isDigit(path.charAt(i))) {
                    return false;
                }
            }
            return true;
        }
        return false;
    }
};
|
||||
|
||||
|
||||
/**
|
||||
* Calculates the total RAM of the device through Android API or /proc/meminfo.
|
||||
*
|
||||
* @param c - Context object for current running activity.
|
||||
* @return Total RAM that the device has, or DEVICEINFO_UNKNOWN = -1 in the event of an error.
|
||||
*/
|
||||
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
|
||||
public static long getTotalMemory(Context c) {
|
||||
// memInfo.totalMem not supported in pre-Jelly Bean APIs.
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
|
||||
ActivityManager.MemoryInfo memInfo = new ActivityManager.MemoryInfo();
|
||||
ActivityManager am = (ActivityManager) c.getSystemService(Context.ACTIVITY_SERVICE);
|
||||
am.getMemoryInfo(memInfo);
|
||||
if (memInfo != null) {
|
||||
return memInfo.totalMem;
|
||||
} else {
|
||||
return DEVICEINFO_UNKNOWN;
|
||||
}
|
||||
} else {
|
||||
long totalMem = DEVICEINFO_UNKNOWN;
|
||||
try {
|
||||
FileInputStream stream = new FileInputStream("/proc/meminfo");
|
||||
try {
|
||||
totalMem = parseFileForValue("MemTotal", stream);
|
||||
totalMem *= 1024;
|
||||
} finally {
|
||||
stream.close();
|
||||
}
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
return totalMem;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Method for reading the clock speed of a CPU core on the device. Will read from either
|
||||
* {@code /sys/devices/system/cpu/cpu0/cpufreq/cpuinfo_max_freq} or {@code /proc/cpuinfo}.
|
||||
*
|
||||
* @return Clock speed of a core on the device, or -1 in the event of an error.
|
||||
*/
|
||||
public static int getCPUMaxFreqKHz() {
|
||||
int maxFreq = DEVICEINFO_UNKNOWN;
|
||||
try {
|
||||
for (int i = 0; i < getNumberOfCPUCores(); i++) {
|
||||
String filename =
|
||||
"/sys/devices/system/cpu/cpu" + i + "/cpufreq/cpuinfo_max_freq";
|
||||
File cpuInfoMaxFreqFile = new File(filename);
|
||||
if (cpuInfoMaxFreqFile.exists() && cpuInfoMaxFreqFile.canRead()) {
|
||||
byte[] buffer = new byte[128];
|
||||
FileInputStream stream = new FileInputStream(cpuInfoMaxFreqFile);
|
||||
try {
|
||||
stream.read(buffer);
|
||||
int endIndex = 0;
|
||||
//Trim the first number out of the byte buffer.
|
||||
while (Character.isDigit(buffer[endIndex]) && endIndex < buffer.length) {
|
||||
endIndex++;
|
||||
}
|
||||
String str = new String(buffer, 0, endIndex);
|
||||
Integer freqBound = Integer.parseInt(str);
|
||||
if (freqBound > maxFreq) {
|
||||
maxFreq = freqBound;
|
||||
}
|
||||
} catch (NumberFormatException e) {
|
||||
//Fall through and use /proc/cpuinfo.
|
||||
} finally {
|
||||
stream.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
if (maxFreq == DEVICEINFO_UNKNOWN) {
|
||||
FileInputStream stream = new FileInputStream("/proc/cpuinfo");
|
||||
try {
|
||||
int freqBound = parseFileForValue("cpu MHz", stream);
|
||||
freqBound *= 1024; //MHz -> kHz
|
||||
if (freqBound > maxFreq) maxFreq = freqBound;
|
||||
} finally {
|
||||
stream.close();
|
||||
}
|
||||
}
|
||||
} catch (IOException e) {
|
||||
maxFreq = DEVICEINFO_UNKNOWN; //Fall through and return unknown.
|
||||
}
|
||||
return maxFreq;
|
||||
}
|
||||
|
||||
/**
 * Reads the number of CPU cores from the first available information from
 * {@code /sys/devices/system/cpu/possible}, {@code /sys/devices/system/cpu/present},
 * then a directory listing of {@code /sys/devices/system/cpu/}.
 *
 * @return Number of CPU cores in the phone, or DEVICEINFO_UNKNOWN = -1 in the event of an error.
 */
public static int getNumberOfCPUCores() {
    if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.GINGERBREAD_MR1) {
        // Gingerbread doesn't support giving a single application access to both cores, but a
        // handful of devices (Atrix 4G and Droid X2 for example) were released with a dual-core
        // chipset and Gingerbread; that can let an app in the background run without impacting
        // the foreground application. But for our purposes, it makes them single core.
        return 1;
    }
    int cores;
    try {
        cores = getCoresFromFileInfo("/sys/devices/system/cpu/possible");
        if (cores == DEVICEINFO_UNKNOWN) {
            cores = getCoresFromFileInfo("/sys/devices/system/cpu/present");
        }
        if (cores == DEVICEINFO_UNKNOWN) {
            // Last resort: count the cpuN entries in the sysfs directory.
            cores = new File("/sys/devices/system/cpu/").listFiles(CPU_FILTER).length;
        }
    } catch (SecurityException e) {
        cores = DEVICEINFO_UNKNOWN;
    } catch (NullPointerException e) {
        // listFiles() returns null when the directory is unreadable.
        cores = DEVICEINFO_UNKNOWN;
    }
    return cores;
}
|
||||
|
||||
/**
 * Tries to read file contents from the file location to determine the number of cores on device.
 *
 * @param fileLocation The location of the file with CPU information
 * @return Number of CPU cores in the phone, or DEVICEINFO_UNKNOWN = -1 in the event of an error.
 */
private static int getCoresFromFileInfo(String fileLocation) {
    InputStream is = null;
    try {
        is = new FileInputStream(fileLocation);
        BufferedReader buf = new BufferedReader(new InputStreamReader(is));
        // Only the first line holds the "0-N" range.
        String fileContents = buf.readLine();
        buf.close();
        return getCoresFromFileString(fileContents);
    } catch (IOException e) {
        return DEVICEINFO_UNKNOWN;
    } finally {
        // Closing the underlying stream also covers the case where
        // buf.close() above was skipped by an exception.
        if (is != null) {
            try {
                is.close();
            } catch (IOException e) {
                // Do nothing.
            }
        }
    }
}
|
||||
|
||||
/**
|
||||
* Converts from a CPU core information format to number of cores.
|
||||
*
|
||||
* @param str The CPU core information string, in the format of "0-N"
|
||||
* @return The number of cores represented by this string
|
||||
*/
|
||||
private static int getCoresFromFileString(String str) {
|
||||
if (str == null || !str.matches("0-[\\d]+$")) {
|
||||
return DEVICEINFO_UNKNOWN;
|
||||
}
|
||||
return Integer.valueOf(str.substring(2)) + 1;
|
||||
}
|
||||
|
||||
/**
 * Helper method for reading values from system files, using a minimised buffer.
 * Scans for a line beginning with {@code textToMatch} and returns the first
 * number that follows it on that line.
 *
 * @param textToMatch - Text in the system files to read for.
 * @param stream - FileInputStream of the system file being read from.
 * @return A numerical value following textToMatch in specified the system file.
 * -1 in the event of a failure.
 */
private static int parseFileForValue(String textToMatch, FileInputStream stream) {
    // Only the first 1024 bytes of the file are examined.
    byte[] buffer = new byte[1024];
    try {
        int length = stream.read(buffer);
        for (int i = 0; i < length; i++) {
            // A match may begin at the start of the buffer or right after a newline.
            if (buffer[i] == '\n' || i == 0) {
                if (buffer[i] == '\n') i++;
                for (int j = i; j < length; j++) {
                    int textIndex = j - i;
                    //Text doesn't match query at some point.
                    if (buffer[j] != textToMatch.charAt(textIndex)) {
                        break;
                    }
                    //Text matches query here.
                    if (textIndex == textToMatch.length() - 1) {
                        return extractValue(buffer, j);
                    }
                }
            }
        }
    } catch (IOException e) {
        //Ignore any exceptions and fall through to return unknown value.
    } catch (NumberFormatException e) {
        // extractValue hit a malformed number; fall through and report unknown.
    }
    return DEVICEINFO_UNKNOWN;
}
|
||||
|
||||
/**
|
||||
* Helper method used by {@link #parseFileForValue(String, FileInputStream) parseFileForValue}. Parses
|
||||
* the next available number after the match in the file being read and returns it as an integer.
|
||||
*
|
||||
* @param index - The index in the buffer array to begin looking.
|
||||
* @return The next number on that line in the buffer, returned as an int. Returns
|
||||
* DEVICEINFO_UNKNOWN = -1 in the event that no more numbers exist on the same line.
|
||||
*/
|
||||
private static int extractValue(byte[] buffer, int index) {
|
||||
while (index < buffer.length && buffer[index] != '\n') {
|
||||
if (Character.isDigit(buffer[index])) {
|
||||
int start = index;
|
||||
index++;
|
||||
while (index < buffer.length && Character.isDigit(buffer[index])) {
|
||||
index++;
|
||||
}
|
||||
String str = new String(buffer, 0, start, index - start);
|
||||
return Integer.parseInt(str);
|
||||
}
|
||||
index++;
|
||||
}
|
||||
return DEVICEINFO_UNKNOWN;
|
||||
}
|
||||
|
||||
/**
 * Returns the currently available RAM in bytes.
 *
 * @param context context used to obtain the ActivityManager
 * @return available memory in bytes
 */
public static long getAvailMemory(Context context) {
    ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
    ActivityManager.MemoryInfo mi = new ActivityManager.MemoryInfo();
    am.getMemoryInfo(mi);
    return mi.availMem;
}
|
||||
|
||||
/**
 * Returns the device manufacturer/brand string.
 *
 * @return Build.BRAND
 */
public static String getBrand() {
    return Build.BRAND;
}

/**
 * Returns the device model string.
 *
 * @return Build.MODEL
 */
public static String getModel() {
    return Build.MODEL;
}
|
||||
|
||||
/**
|
||||
* 获取硬件信息(cpu型号)
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public static String getHardWare() {
|
||||
try {
|
||||
FileReader fr = new FileReader("/proc/cpuinfo");
|
||||
BufferedReader br = new BufferedReader(fr);
|
||||
String text;
|
||||
String last = "";
|
||||
while ((text = br.readLine()) != null) {
|
||||
last = text;
|
||||
}
|
||||
//一般机型的cpu型号都会在cpuinfo文件的最后一行
|
||||
if (last.contains("Hardware")) {
|
||||
String[] hardWare = last.split(":\\s+", 2);
|
||||
return hardWare[1];
|
||||
}
|
||||
} catch (FileNotFoundException e) {
|
||||
e.printStackTrace();
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
return Build.HARDWARE;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Level judgement based on current memory and CPU.
|
||||
*
|
||||
* @param context - Context object.
|
||||
* @return
|
||||
*/
|
||||
public static int judgeDeviceLevel(Context context) {
|
||||
int level;
|
||||
//有一些设备不符合下述的判断规则,则走一个机型判断模式
|
||||
int specialDevice = judgeDeviceLevelInDeviceName();
|
||||
if (specialDevice >= 0) return specialDevice;
|
||||
|
||||
int ramLevel = judgeMemory(context);
|
||||
int cpuLevel = judgeCPU();
|
||||
if (ramLevel == 0 || ramLevel == 1 || cpuLevel == 0) {
|
||||
level = DEVICE_LEVEL_LOW;
|
||||
} else {
|
||||
if (cpuLevel > 1) {
|
||||
level = DEVICE_LEVEL_HIGH;
|
||||
} else {
|
||||
level = DEVICE_LEVEL_MID;
|
||||
}
|
||||
}
|
||||
LogUtils.d(TAG,"DeviceLevel: " + level);
|
||||
return level;
|
||||
}
|
||||
|
||||
/**
|
||||
* -1 不是特定的高低端机型
|
||||
* @return
|
||||
*/
|
||||
private static int judgeDeviceLevelInDeviceName() {
|
||||
String currentDeviceName = getDeviceName();
|
||||
for (String deviceName:upscaleDevice) {
|
||||
if (deviceName.equals(currentDeviceName)) {
|
||||
return DEVICE_LEVEL_HIGH;
|
||||
}
|
||||
}
|
||||
|
||||
for (String deviceName:middleDevice) {
|
||||
if (deviceName.equals(currentDeviceName)) {
|
||||
return DEVICE_LEVEL_MID;
|
||||
}
|
||||
}
|
||||
|
||||
for (String deviceName:lowDevice) {
|
||||
if (deviceName.equals(currentDeviceName)) {
|
||||
return DEVICE_LEVEL_LOW;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
// Models hard-wired to the high tier regardless of the generic RAM/CPU rules.
public static final String[] upscaleDevice = {"vivo X6S A","MHA-AL00","VKY-AL00","V1838A"};
// Models hard-wired to the low tier (currently none).
public static final String[] lowDevice = {};
// Models hard-wired to the mid tier.
public static final String[] middleDevice = {"OPPO R11s","PAR-AL00","MI 8 Lite","ONEPLUS A6000","PRO 6","PRO 7 Plus"};
|
||||
|
||||
/**
|
||||
* 评定内存的等级.
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
private static int judgeMemory(Context context) {
|
||||
long ramMB = getTotalMemory(context) / (1024 * 1024);
|
||||
int level = -1;
|
||||
if (ramMB <= 2000) { //2G或以下的最低档
|
||||
level = 0;
|
||||
} else if (ramMB <= 3000) { //2-3G
|
||||
level = 1;
|
||||
} else if (ramMB <= 4000) { //4G档 2018主流中端机
|
||||
level = 2;
|
||||
} else if (ramMB <= 6000) { //6G档 高端机
|
||||
level = 3;
|
||||
} else { //6G以上 旗舰机配置
|
||||
level = 4;
|
||||
}
|
||||
return level;
|
||||
}
|
||||
|
||||
/**
|
||||
* 评定CPU等级.(按频率和厂商型号综合判断)
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
private static int judgeCPU() {
|
||||
int level = 0;
|
||||
String cpuName = getHardWare();
|
||||
int freqMHz = getCPUMaxFreqKHz() / 1024;
|
||||
|
||||
//一个不符合下述规律的高级白名单
|
||||
//如果可以获取到CPU型号名称 -> 根据不同的名称走不同判定策略
|
||||
if (!TextUtils.isEmpty(cpuName)) {
|
||||
if (cpuName.contains("qcom") || cpuName.contains("Qualcomm")) { //高通骁龙
|
||||
return judgeQualcommCPU(cpuName, freqMHz);
|
||||
} else if (cpuName.contains("hi") || cpuName.contains("kirin")) { //海思麒麟
|
||||
return judgeSkinCPU(cpuName, freqMHz);
|
||||
} else if (cpuName.contains("MT")) {//联发科
|
||||
return judgeMTCPU(cpuName, freqMHz);
|
||||
}
|
||||
}
|
||||
|
||||
//cpu型号无法获取的普通规则
|
||||
if (freqMHz <= 1600) { //1.5G 低端
|
||||
level = 0;
|
||||
} else if (freqMHz <= 1950) { //2GHz 低中端
|
||||
level = 1;
|
||||
} else if (freqMHz <= 2500) { //2.2 2.3g 中高端
|
||||
level = 2;
|
||||
} else { //高端
|
||||
level = 3;
|
||||
}
|
||||
return level;
|
||||
}
|
||||
|
||||
/**
|
||||
* 联发科芯片等级判定
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
private static int judgeMTCPU(String cpuName, int freqMHz) {
|
||||
//P60之前的全是低端机 MT6771V/C
|
||||
int level = 0;
|
||||
int mtCPUVersion = getMTCPUVersion(cpuName);
|
||||
if (mtCPUVersion == -1) {
|
||||
//读取不出version 按照一个比较严格的方式来筛选出高端机
|
||||
if (freqMHz <= 1600) { //1.5G 低端
|
||||
level = 0;
|
||||
} else if (freqMHz <= 2200) { //2GHz 低中端
|
||||
level = 1;
|
||||
} else if (freqMHz <= 2700) { //2.2 2.3g 中高端
|
||||
level = 2;
|
||||
} else { //高端
|
||||
level = 3;
|
||||
}
|
||||
} else if (mtCPUVersion < 6771) {
|
||||
//均为中低端机
|
||||
if (freqMHz <= 1600) { //1.5G 低端
|
||||
level = 0;
|
||||
} else { //2GHz 中端
|
||||
level = 1;
|
||||
}
|
||||
} else {
|
||||
if (freqMHz <= 1600) { //1.5G 低端
|
||||
level = 0;
|
||||
} else if (freqMHz <= 1900) { //2GHz 低中端
|
||||
level = 1;
|
||||
} else if (freqMHz <= 2500) { //2.2 2.3g 中高端
|
||||
level = 2;
|
||||
} else { //高端
|
||||
level = 3;
|
||||
}
|
||||
}
|
||||
|
||||
return level;
|
||||
}
|
||||
|
||||
/**
|
||||
* 通过联发科CPU型号定义 -> 获取cpu version
|
||||
*
|
||||
* @param cpuName
|
||||
* @return
|
||||
*/
|
||||
private static int getMTCPUVersion(String cpuName) {
|
||||
//截取MT后面的四位数字
|
||||
int cpuVersion = -1;
|
||||
if (cpuName.length() > 5) {
|
||||
String cpuVersionStr = cpuName.substring(2, 6);
|
||||
try {
|
||||
cpuVersion = Integer.valueOf(cpuVersionStr);
|
||||
} catch (NumberFormatException exception) {
|
||||
exception.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
return cpuVersion;
|
||||
}
|
||||
|
||||
/**
|
||||
* 高通骁龙芯片等级判定
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
private static int judgeQualcommCPU(String cpuName, int freqMHz) {
|
||||
int level = 0;
|
||||
//xxxx inc MSM8937 比较老的芯片
|
||||
//7 8 xxx inc SDM710
|
||||
if (cpuName.contains("MSM")) {
|
||||
//老芯片
|
||||
if (freqMHz <= 1600) { //1.5G 低端
|
||||
level = 0;
|
||||
} else { //2GHz 低中端
|
||||
level = 1;
|
||||
}
|
||||
} else {
|
||||
//新的芯片
|
||||
if (freqMHz <= 1600) { //1.5G 低端
|
||||
level = 0;
|
||||
} else if (freqMHz <= 2000) { //2GHz 低中端
|
||||
level = 1;
|
||||
} else if (freqMHz <= 2500) { //2.2 2.3g 中高端
|
||||
level = 2;
|
||||
} else { //高端
|
||||
level = 3;
|
||||
}
|
||||
}
|
||||
|
||||
return level;
|
||||
}
|
||||
|
||||
/**
|
||||
* 麒麟芯片等级判定
|
||||
*
|
||||
* @param freqMHz
|
||||
* @return
|
||||
*/
|
||||
private static int judgeSkinCPU(String cpuName, int freqMHz) {
|
||||
//型号 -> kirin710之后 & 最高核心频率
|
||||
int level = 0;
|
||||
if (cpuName.startsWith("hi")) {
|
||||
//这个是海思的芯片中低端
|
||||
if (freqMHz <= 1600) { //1.5G 低端
|
||||
level = 0;
|
||||
} else if (freqMHz <= 2000) { //2GHz 低中端
|
||||
level = 1;
|
||||
}
|
||||
} else {
|
||||
//这个是海思麒麟的芯片
|
||||
if (freqMHz <= 1600) { //1.5G 低端
|
||||
level = 0;
|
||||
} else if (freqMHz <= 2000) { //2GHz 低中端
|
||||
level = 1;
|
||||
} else if (freqMHz <= 2500) { //2.2 2.3g 中高端
|
||||
level = 2;
|
||||
} else { //高端
|
||||
level = 3;
|
||||
}
|
||||
}
|
||||
|
||||
return level;
|
||||
}
|
||||
|
||||
// Model name constant for the Google Nexus 6P.
public static final String Nexus_6P = "Nexus 6P";
|
||||
|
||||
/**
|
||||
* 获取设备名
|
||||
*
|
||||
* @return
|
||||
*/
|
||||
public static String getDeviceName() {
|
||||
String deviceName = "";
|
||||
if (Build.MODEL != null) deviceName = Build.MODEL;
|
||||
LogUtils.e(TAG,"deviceName: " + deviceName);
|
||||
return deviceName;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,57 @@
|
||||
/*
|
||||
* MIT License
|
||||
*
|
||||
* Copyright (c) 2023 Agora Community
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*/
|
||||
|
||||
package io.agora.beautyapi.faceunity.utils
|
||||
|
||||
import io.agora.base.internal.Logging
|
||||
|
||||
/**
 * Thin logging facade for the FaceUnity beauty API; routes everything through
 * the Agora SDK's internal [Logging] with a "[BeautyAPI][FaceUnity]" prefix.
 */
object LogUtils {
    private const val beautyType = "FaceUnity"

    /**
     * Formats [content] with [args] and prepends the beauty-API prefix.
     *
     * Fix: the original called `String.format(content, args)`, which passes
     * the vararg ARRAY as a single argument; it must be spread with `*args`
     * for format specifiers like %s/%d to receive the individual values.
     */
    private fun format(content: String, args: Array<out Any>): String =
        "[BeautyAPI][$beautyType] : ${String.format(content, *args)}"

    @JvmStatic
    fun i(tag: String, content: String, vararg args: Any) {
        Logging.log(Logging.Severity.LS_INFO, tag, format(content, args))
    }

    @JvmStatic
    fun d(tag: String, content: String, vararg args: Any) {
        Logging.d(tag, format(content, args))
    }

    @JvmStatic
    fun w(tag: String, content: String, vararg args: Any) {
        Logging.w(tag, format(content, args))
    }

    @JvmStatic
    fun e(tag: String, content: String, vararg args: Any) {
        Logging.e(tag, format(content, args))
    }
}
|
||||
@@ -0,0 +1,80 @@
|
||||
/*
|
||||
* MIT License
|
||||
*
|
||||
* Copyright (c) 2023 Agora Community
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*/
|
||||
|
||||
package io.agora.beautyapi.faceunity.utils
|
||||
|
||||
import android.os.Handler
|
||||
import android.os.Looper
|
||||
import io.agora.beautyapi.faceunity.BeautyStats
|
||||
import kotlin.math.max
|
||||
import kotlin.math.min
|
||||
|
||||
/**
 * Aggregates per-frame cost samples and, once per [statsDuration] window,
 * posts a min/max/average [BeautyStats] snapshot to the main thread via
 * [onStatsChanged].
 */
class StatsHelper(
    private val statsDuration: Long,
    private val onStatsChanged: (BeautyStats) -> Unit
) {

    private val mainHandler = Handler(Looper.getMainLooper())
    private var windowStart = 0L
    private val costSamples = mutableListOf<Long>()
    private var costMax = 0L
    private var costMin = Long.MAX_VALUE

    /**
     * Records one cost sample. When the current window has elapsed, emits a
     * snapshot of the accumulated samples and starts a new window.
     */
    fun once(cost: Long) {
        val now = System.currentTimeMillis()
        if (windowStart == 0L) {
            // First sample after construction/reset opens the window.
            windowStart = now
        } else if (now - windowStart >= statsDuration) {
            windowStart = now
            // Snapshot min/max/average before clearing the accumulators so the
            // posted lambda sees stable values.
            val average = costSamples.sum() / costSamples.size
            val stats = BeautyStats(costMin, costMax, average)
            mainHandler.post { onStatsChanged.invoke(stats) }

            costSamples.clear()
            costMax = 0L
            costMin = Long.MAX_VALUE
        }

        costSamples.add(cost)
        costMax = max(costMax, cost)
        costMin = min(costMin, cost)
    }

    /** Drops any pending main-thread callbacks and clears all accumulators. */
    fun reset() {
        mainHandler.removeCallbacksAndMessages(null)
        windowStart = 0
        costSamples.clear()
        costMax = 0L
        costMin = Long.MAX_VALUE
    }
}
|
||||
@@ -0,0 +1,210 @@
|
||||
/*
|
||||
* MIT License
|
||||
*
|
||||
* Copyright (c) 2023 Agora Community
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*/
|
||||
|
||||
package io.agora.beautyapi.faceunity.utils.egl;
|
||||
|
||||
import static android.opengl.EGL14.EGL_CONTEXT_CLIENT_VERSION;
|
||||
|
||||
import android.opengl.GLDebugHelper;
|
||||
|
||||
import javax.microedition.khronos.egl.EGL10;
|
||||
import javax.microedition.khronos.egl.EGLConfig;
|
||||
import javax.microedition.khronos.egl.EGLContext;
|
||||
import javax.microedition.khronos.egl.EGLDisplay;
|
||||
import javax.microedition.khronos.egl.EGLSurface;
|
||||
|
||||
import io.agora.beautyapi.faceunity.utils.LogUtils;
|
||||
|
||||
/**
 * Creates and owns an offscreen EGL setup (display, config, 1x1 pbuffer
 * surface and an OpenGL ES 2.0 context), optionally sharing resources with an
 * existing context. Built on the javax.microedition.khronos EGL10 API.
 */
public class EGLContextHelper {
    private static final String DEBUG_TAG = "EGLContextManager";
    // Requested config: RGB888, no alpha, 16-bit depth, no stencil.
    private final int mRedSize = 8;
    private final int mGreenSize = 8;
    private final int mBlueSize = 8;
    private final int mAlphaSize = 0;
    private final int mDepthSize = 16;
    private final int mStencilSize = 0;
    // 4 == EGL_OPENGL_ES2_BIT: request an ES 2.0-renderable config.
    private final int mRenderType = 4;
    public EGLContextHelper(){}

    /**
     * Initializes EGL end-to-end: wraps EGL with debug checks, obtains the
     * default display, chooses a config, creates a 1x1 pbuffer surface and an
     * ES2 context (shared with shareContext when non-null). Does NOT make the
     * context current — see eglMakeCurrent().
     *
     * @param shareContext an existing context to share resources with, or null.
     * @throws Exception when any step of the EGL setup fails.
     */
    public void initEGL(EGLContext shareContext) throws Exception {
        // Wrap the EGL entry points with error/thread checking for debugging.
        mEGL = (EGL10) GLDebugHelper.wrap(EGLContext.getEGL(),
                GLDebugHelper.CONFIG_CHECK_GL_ERROR
                        | GLDebugHelper.CONFIG_CHECK_THREAD, null);

        if (mEGL == null) {
            throw new Exception("Couldn't get EGL");
        }

        mGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);

        if (mGLDisplay == null) {
            throw new Exception("Couldn't get display for GL");
        }

        // eglInitialize returns the supported EGL major/minor version.
        int[] curGLVersion = new int[2];
        mEGL.eglInitialize(mGLDisplay, curGLVersion);

        LogUtils.i(DEBUG_TAG, "GL version = " + curGLVersion[0] + "."
                + curGLVersion[1]);

        // First pass: ask how many configs match mConfigSpec.
        int[] num_config = new int[1];
        if(!mEGL.eglChooseConfig(mGLDisplay, mConfigSpec, null, 1,
                num_config)){
            throw new IllegalArgumentException("eglChooseConfig failed");
        }
        int numConfigs = num_config[0];
        if (numConfigs <= 0) {
            throw new IllegalArgumentException(
                    "No configs match configSpec");
        }

        // Second pass: fetch all matching configs and pick an exact-match one;
        // fall back to the first returned config otherwise.
        EGLConfig[] configs = new EGLConfig[numConfigs];
        if (!mEGL.eglChooseConfig(mGLDisplay, mConfigSpec, configs, numConfigs,
                num_config)) {
            throw new IllegalArgumentException("eglChooseConfig#2 failed");
        }
        mGLConfig = chooseConfig(mEGL, mGLDisplay, configs);
        if (mGLConfig == null) {
            mGLConfig = configs[0];
        }

        // A minimal 1x1 pbuffer: the surface is only needed to make the
        // context current, not for actual rendering output.
        int[] surfaceAttribs = {
                EGL10.EGL_WIDTH, 1,
                EGL10.EGL_HEIGHT, 1,
                EGL10.EGL_NONE
        };
        mGLSurface = mEGL.eglCreatePbufferSurface(mGLDisplay, mGLConfig, surfaceAttribs);

        if (mGLSurface == null) {
            throw new Exception("Couldn't create new surface");
        }

        // Request an OpenGL ES 2.0 context.
        int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE};
        mGLContext = mEGL.eglCreateContext(mGLDisplay, mGLConfig,
                shareContext, attrib_list);

        if (mGLContext == null) {
            throw new Exception("Couldn't create new context");
        }

        // Intentionally not made current here; callers decide the thread.
//        if (!mEGL.eglMakeCurrent(mGLDisplay, mGLSurface, mGLSurface, mGLContext)) {
//            throw new Exception("Failed to eglMakeCurrent");
//        }
    }

    /** Returns the created EGL context (null before initEGL). */
    public EGLContext getEGLContext() {
        return mGLContext;
    }

    /** Returns the EGL display (null before initEGL). */
    public EGLDisplay getGLDisplay() {
        return mGLDisplay;
    }

    /** Returns the chosen EGL config (null before initEGL). */
    public EGLConfig getGLConfig() {
        return mGLConfig;
    }

    /** Returns the 1x1 pbuffer surface (null before initEGL). */
    public EGLSurface getGLSurface() {
        return mGLSurface;
    }

    /** Returns the (debug-wrapped) EGL10 entry points. */
    public EGL10 getEGL() {
        return mEGL;
    }

    // EGL objects created by initEGL and torn down by release().
    EGL10 mEGL;
    EGLDisplay mGLDisplay;
    EGLConfig mGLConfig;
    EGLSurface mGLSurface;
    EGLContext mGLContext;

    // Attribute list handed to eglChooseConfig; mirrors the m*Size fields.
    int[] mConfigSpec = new int[]{
            EGL10.EGL_RED_SIZE, mRedSize,
            EGL10.EGL_GREEN_SIZE, mGreenSize,
            EGL10.EGL_BLUE_SIZE, mBlueSize,
            EGL10.EGL_ALPHA_SIZE, mAlphaSize,
            EGL10.EGL_DEPTH_SIZE, mDepthSize,
            EGL10.EGL_STENCIL_SIZE, mStencilSize,
            EGL10.EGL_RENDERABLE_TYPE, mRenderType,// EGL version 2.0
            EGL10.EGL_NONE};

    /**
     * Unbinds the context and destroys surface, context and display.
     * Call from the thread that owns the context.
     */
    public void release() {
        mEGL.eglMakeCurrent(mGLDisplay, EGL10.EGL_NO_SURFACE,
                EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
        mEGL.eglDestroySurface(mGLDisplay, mGLSurface);
        mEGL.eglDestroyContext(mGLDisplay, mGLContext);
        mEGL.eglTerminate(mGLDisplay);

        LogUtils.i(DEBUG_TAG, "GL Cleaned up");
    }

    /**
     * Makes the pbuffer surface and context current on the calling thread.
     *
     * @return false when no context exists or eglMakeCurrent fails.
     */
    public boolean eglMakeCurrent(){
        if(mGLContext == EGL10.EGL_NO_CONTEXT){
            return false;
        }else{
            return mEGL.eglMakeCurrent(mGLDisplay, mGLSurface, mGLSurface, mGLContext);
        }
    }

    /** Detaches any current context/surface from the calling thread. */
    public boolean eglMakeNoCurrent(){
        return mEGL.eglMakeCurrent(mGLDisplay, EGL10.EGL_NO_SURFACE,
                EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
    }

    /**
     * Picks the first config whose depth/stencil meet the minimums and whose
     * RGBA sizes match exactly; returns null when none matches exactly.
     */
    private EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
                                   EGLConfig[] configs) {
        for (EGLConfig config : configs) {
            int d = findConfigAttrib(egl, display, config,
                    EGL10.EGL_DEPTH_SIZE, 0);
            int s = findConfigAttrib(egl, display, config,
                    EGL10.EGL_STENCIL_SIZE, 0);
            if ((d >= mDepthSize) && (s >= mStencilSize)) {
                int r = findConfigAttrib(egl, display, config,
                        EGL10.EGL_RED_SIZE, 0);
                int g = findConfigAttrib(egl, display, config,
                        EGL10.EGL_GREEN_SIZE, 0);
                int b = findConfigAttrib(egl, display, config,
                        EGL10.EGL_BLUE_SIZE, 0);
                int a = findConfigAttrib(egl, display, config,
                        EGL10.EGL_ALPHA_SIZE, 0);
                if ((r == mRedSize) && (g == mGreenSize)
                        && (b == mBlueSize) && (a == mAlphaSize)) {
                    return config;
                }
            }
        }
        return null;
    }

    /**
     * Reads one attribute of a config, returning defaultValue when the query
     * fails.
     */
    private int findConfigAttrib(EGL10 egl, EGLDisplay display,
                                 EGLConfig config, int attribute, int defaultValue) {
        int[] value = new int[1];
        if (egl.eglGetConfigAttrib(display, config, attribute, value)) {
            return value[0];
        }
        return defaultValue;
    }
}
|
||||
@@ -0,0 +1,84 @@
|
||||
/*
|
||||
* MIT License
|
||||
*
|
||||
* Copyright (c) 2023 Agora Community
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*/
|
||||
|
||||
package io.agora.beautyapi.faceunity.utils.egl;
|
||||
|
||||
import android.opengl.GLES11Ext;
|
||||
import android.opengl.GLES20;
|
||||
import android.opengl.GLES30;
|
||||
|
||||
/**
 * Copies a GL_TEXTURE_2D texture into a GL_TEXTURE_EXTERNAL_OES texture via
 * framebuffer blit. Requires an OpenGL ES 3.0 context (glBlitFramebuffer)
 * current on the calling thread.
 */
public class GLCopyHelper {
    // Number of framebuffer pairs allocated; callers select one via index.
    private final int bufferCount;

    public GLCopyHelper(){
        this(1);
    }

    /**
     * @param bufferCount how many read/draw framebuffer pairs to allocate
     *                    lazily on first copy.
     */
    public GLCopyHelper(int bufferCount){
        this.bufferCount = bufferCount;
    }

    // Lazily created in copy2DTextureToOesTexture; freed in release().
    private int[] mDstFrameBuffer;
    private int[] mSrcFrameBuffer;

    /**
     * Blits srcTexture (2D) onto dstTexture (OES) over a width x height area.
     *
     * @param srcTexture source GL_TEXTURE_2D texture id.
     * @param dstTexture destination GL_TEXTURE_EXTERNAL_OES texture id.
     * @param width      copy width in pixels.
     * @param height     copy height in pixels.
     * @param index      which pre-allocated framebuffer pair to use
     *                   (0 <= index < bufferCount).
     */
    public void copy2DTextureToOesTexture(
            int srcTexture,
            int dstTexture,
            int width, int height,
            int index){
        // Lazily create the framebuffer id arrays on first use.
        if(mDstFrameBuffer == null){
            mDstFrameBuffer = new int[bufferCount];
            GLES20.glGenFramebuffers(bufferCount, mDstFrameBuffer, 0);
        }

        if(mSrcFrameBuffer == null){
            mSrcFrameBuffer = new int[bufferCount];
            GLES20.glGenFramebuffers(bufferCount, mSrcFrameBuffer, 0);
        }

        // Attach the source as the READ framebuffer and the destination as the
        // DRAW framebuffer, blit, then unbind everything to leave GL state clean.
        GLES30.glBindFramebuffer(GLES30.GL_READ_FRAMEBUFFER, mSrcFrameBuffer[index]);
        GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, srcTexture);
        GLES30.glFramebufferTexture2D(GLES30.GL_READ_FRAMEBUFFER, GLES30.GL_COLOR_ATTACHMENT0, GLES30.GL_TEXTURE_2D, srcTexture, 0);
        GLES30.glBindFramebuffer(GLES30.GL_DRAW_FRAMEBUFFER, mDstFrameBuffer[index]);
        GLES30.glFramebufferTexture2D(GLES30.GL_DRAW_FRAMEBUFFER,
                GLES30.GL_COLOR_ATTACHMENT0, GLES11Ext.GL_TEXTURE_EXTERNAL_OES, dstTexture, 0);
        GLES30.glBlitFramebuffer(0, 0, width, height, 0, 0, width, height, GLES30.GL_COLOR_BUFFER_BIT, GLES30.GL_LINEAR);
        GLES30.glBindFramebuffer(GLES30.GL_DRAW_FRAMEBUFFER, 0);
        GLES30.glBindFramebuffer(GLES30.GL_READ_FRAMEBUFFER, 0);
        GLES30.glBindTexture(GLES30.GL_TEXTURE_2D, 0);
        GLES30.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
    }

    /** Deletes any framebuffers created by this helper. GL context required. */
    public void release(){
        if(mDstFrameBuffer != null){
            GLES20.glDeleteFramebuffers(mDstFrameBuffer.length, mDstFrameBuffer, 0);
            mDstFrameBuffer = null;
        }

        if(mSrcFrameBuffer != null){
            GLES20.glDeleteFramebuffers(mSrcFrameBuffer.length, mSrcFrameBuffer, 0);
            mSrcFrameBuffer = null;
        }
    }
}
|
||||
@@ -0,0 +1,205 @@
|
||||
package io.agora.beautyapi.faceunity.utils.egl;
|
||||
|
||||
import android.graphics.Matrix;
|
||||
import android.opengl.GLES11Ext;
|
||||
import android.opengl.GLES20;
|
||||
|
||||
import io.agora.base.internal.video.EglBase;
|
||||
import io.agora.base.internal.video.GlRectDrawer;
|
||||
import io.agora.base.internal.video.RendererCommon;
|
||||
|
||||
public class GLFrameBuffer {
|
||||
|
||||
private int mFramebufferId = -1;
|
||||
private int mTextureId = -1;
|
||||
private int mWidth, mHeight, mRotation;
|
||||
private boolean isFlipV, isFlipH, isTextureInner, isTextureChanged, isSizeChanged;
|
||||
|
||||
private RendererCommon.GlDrawer drawer;
|
||||
|
||||
private float[] mTexMatrix = GLUtils.IDENTITY_MATRIX;
|
||||
|
||||
public GLFrameBuffer() {
|
||||
|
||||
}
|
||||
|
||||
public boolean setSize(int width, int height) {
|
||||
if (mWidth != width || mHeight != height) {
|
||||
mWidth = width;
|
||||
mHeight = height;
|
||||
isSizeChanged = true;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
public void setRotation(int rotation) {
|
||||
if (mRotation != rotation) {
|
||||
mRotation = rotation;
|
||||
}
|
||||
}
|
||||
|
||||
public void setFlipV(boolean flipV) {
|
||||
if (isFlipV != flipV) {
|
||||
isFlipV = flipV;
|
||||
}
|
||||
}
|
||||
|
||||
public void setFlipH(boolean flipH) {
|
||||
if (isFlipH != flipH) {
|
||||
isFlipH = flipH;
|
||||
}
|
||||
}
|
||||
|
||||
public void setTextureId(int textureId){
|
||||
if(mTextureId != textureId){
|
||||
deleteTexture();
|
||||
mTextureId = textureId;
|
||||
isTextureChanged = true;
|
||||
}
|
||||
}
|
||||
|
||||
public int getTextureId(){
|
||||
return mTextureId;
|
||||
}
|
||||
|
||||
public void setTexMatrix(float[] matrix) {
|
||||
if (matrix != null) {
|
||||
mTexMatrix = matrix;
|
||||
} else {
|
||||
mTexMatrix = GLUtils.IDENTITY_MATRIX;
|
||||
}
|
||||
}
|
||||
|
||||
public void resetTransform(){
|
||||
mTexMatrix = GLUtils.IDENTITY_MATRIX;
|
||||
isFlipH = isFlipV = false;
|
||||
mRotation = 0;
|
||||
}
|
||||
|
||||
public int process(int textureId, int textureType) {
|
||||
if (mWidth <= 0 && mHeight <= 0) {
|
||||
throw new RuntimeException("setSize firstly!");
|
||||
}
|
||||
|
||||
if(mTextureId == -1){
|
||||
mTextureId = createTexture(mWidth, mHeight);
|
||||
bindFramebuffer(mTextureId);
|
||||
isTextureInner = true;
|
||||
}else if(isTextureInner && isSizeChanged){
|
||||
GLES20.glDeleteTextures(1, new int[]{mTextureId}, 0);
|
||||
mTextureId = createTexture(mWidth, mHeight);
|
||||
bindFramebuffer(mTextureId);
|
||||
}else if(isTextureChanged){
|
||||
bindFramebuffer(mTextureId);
|
||||
}
|
||||
isTextureChanged = false;
|
||||
isSizeChanged = false;
|
||||
|
||||
if(drawer == null){
|
||||
drawer = new GlRectDrawer();
|
||||
}
|
||||
|
||||
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFramebufferId);
|
||||
GLUtils.checkGlError("glBindFramebuffer");
|
||||
|
||||
Matrix transform = RendererCommon.convertMatrixToAndroidGraphicsMatrix(mTexMatrix);
|
||||
transform.preTranslate(0.5f, 0.5f);
|
||||
transform.preRotate(mRotation, 0.f, 0.f);
|
||||
transform.preScale(
|
||||
isFlipH ? -1.f: 1.f,
|
||||
isFlipV ? -1.f: 1.f
|
||||
);
|
||||
transform.preTranslate(-0.5f, -0.5f);
|
||||
float[] matrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(transform);
|
||||
|
||||
synchronized (EglBase.lock){
|
||||
if(textureType == GLES11Ext.GL_TEXTURE_EXTERNAL_OES){
|
||||
drawer.drawOes(textureId,matrix, mWidth, mHeight, 0, 0, mWidth, mHeight);
|
||||
|
||||
}else{
|
||||
drawer.drawRgb(textureId, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight);
|
||||
}
|
||||
}
|
||||
|
||||
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_NONE);
|
||||
GLES20.glFinish();
|
||||
|
||||
return mTextureId;
|
||||
}
|
||||
|
||||
public void release(){
|
||||
deleteTexture();
|
||||
deleteFramebuffer();
|
||||
|
||||
if(drawer != null){
|
||||
drawer.release();
|
||||
drawer = null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void deleteFramebuffer() {
|
||||
if (mFramebufferId != -1) {
|
||||
GLES20.glDeleteFramebuffers(1, new int[]{mFramebufferId}, 0);
|
||||
mFramebufferId = -1;
|
||||
}
|
||||
}
|
||||
|
||||
public int createTexture(int width, int height){
|
||||
int[] textures = new int[1];
|
||||
GLES20.glGenTextures(1, textures, 0);
|
||||
GLUtils.checkGlError("glGenTextures");
|
||||
int textureId = textures[0];
|
||||
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
|
||||
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0,
|
||||
GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
|
||||
|
||||
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
|
||||
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
|
||||
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
|
||||
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
|
||||
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
|
||||
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
|
||||
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
|
||||
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
|
||||
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, GLES20.GL_NONE);
|
||||
|
||||
return textureId;
|
||||
}
|
||||
|
||||
public void resizeTexture(int textureId, int width, int height) {
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
|
||||
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0,
|
||||
GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, GLES20.GL_NONE);
|
||||
}
|
||||
|
||||
private void deleteTexture() {
|
||||
if (isTextureInner && mTextureId != -1) {
|
||||
GLES20.glDeleteTextures(1, new int[]{mTextureId}, 0);
|
||||
}
|
||||
isTextureInner = false;
|
||||
mTextureId = -1;
|
||||
}
|
||||
|
||||
private void bindFramebuffer(int textureId) {
|
||||
if(mFramebufferId == -1){
|
||||
int[] framebuffers = new int[1];
|
||||
GLES20.glGenFramebuffers(1, framebuffers, 0);
|
||||
GLUtils.checkGlError("glGenFramebuffers");
|
||||
mFramebufferId = framebuffers[0];
|
||||
}
|
||||
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mFramebufferId);
|
||||
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER,
|
||||
GLES20.GL_COLOR_ATTACHMENT0,
|
||||
GLES20.GL_TEXTURE_2D,
|
||||
textureId, 0);
|
||||
|
||||
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, GLES20.GL_NONE);
|
||||
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_NONE);
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,180 @@
|
||||
/*
|
||||
* MIT License
|
||||
*
|
||||
* Copyright (c) 2023 Agora Community
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*/
|
||||
|
||||
package io.agora.beautyapi.faceunity.utils.egl

import android.opengl.GLES20
import android.util.Log
import android.util.Size
import java.util.concurrent.ConcurrentLinkedQueue

/**
 * A fixed-capacity FIFO of reusable GL textures.
 *
 * [enqueue] renders an incoming texture ([TextureIn]) into one of [cacheCount]
 * cached FBO-backed 2D textures via [glFrameBuffer] (applying rotation, flips
 * and an optional texture matrix), then appends the result to the queue.
 * [dequeue] hands processed frames out in FIFO order.
 *
 * NOTE(review): the backing queue is a [ConcurrentLinkedQueue], but
 * [enqueue]/[release] mutate GL state and the texture cache, so they are
 * presumably meant to run on a single GL thread — confirm with callers.
 */
class GLTextureBufferQueue(
    private val glFrameBuffer: GLFrameBuffer = GLFrameBuffer(),
    private val cacheCount: Int = 6,
    private val loggable: Boolean = false
) {
    private val TAG = "GLTextureBufferQueue"

    // Next slot of the texture ring to (re)use.
    private var cacheIndex = 0
    private val cacheTextureOuts = arrayOfNulls<TextureOut>(cacheCount)
    private val textureIdQueue = ConcurrentLinkedQueue<TextureOut>()

    /**
     * Renders [iN] into the next cached texture and appends it to the queue.
     *
     * The frame is dropped (and an error logged when [loggable]) if the queue
     * already holds [cacheCount] entries.
     *
     * @return the queue size after this call.
     */
    fun enqueue(iN: TextureIn): Int {
        var size = textureIdQueue.size
        if (size < cacheCount) {
            var out = cacheTextureOuts[cacheIndex]
            // A 90/270 degree rotation swaps the output dimensions.
            val outSize = when (iN.rotation) {
                90, 270 -> Size(iN.height, iN.width)
                else -> Size(iN.width, iN.height)
            }

            if (out == null) {
                // First use of this slot: allocate a texture of the right size.
                val textureId = glFrameBuffer.createTexture(outSize.width, outSize.height)
                out = TextureOut(
                    0,
                    textureId,
                    GLES20.GL_TEXTURE_2D,
                    outSize.width,
                    outSize.height,
                    iN.isFrontCamera,
                    iN.isMirror,
                )
                cacheTextureOuts[cacheIndex] = out
            } else if (out.width != outSize.width || out.height != outSize.height) {
                // Input size changed: resize the slot's texture and refresh metadata.
                glFrameBuffer.resizeTexture(out.textureId, outSize.width, outSize.height)
                out = TextureOut(
                    0,
                    out.textureId,
                    out.textureType,
                    outSize.width,
                    outSize.height,
                    iN.isFrontCamera,
                    iN.isMirror,
                )
                cacheTextureOuts[cacheIndex] = out
            } else if (out.isFrontCamera != iN.isFrontCamera || out.isMirror != iN.isMirror) {
                // BUGFIX: TextureOut previously lacked an isMirror field, so the
                // isMirror argument passed here bound positionally to `tag` and was
                // immediately overwritten below. The field is restored and the
                // camera/mirror flags are now refreshed together.
                out = TextureOut(
                    0,
                    out.textureId,
                    out.textureType,
                    out.width,
                    out.height,
                    iN.isFrontCamera,
                    iN.isMirror,
                )
                cacheTextureOuts[cacheIndex] = out
            }

            glFrameBuffer.textureId = out.textureId
            glFrameBuffer.setSize(out.width, out.height)
            glFrameBuffer.resetTransform()
            glFrameBuffer.setRotation(iN.rotation)
            if (iN.transform != null) {
                glFrameBuffer.setTexMatrix(iN.transform)
                // With an explicit texture matrix the horizontal flip follows the
                // front-camera flag directly, inverted once more when mirroring.
                var flipH = iN.isFrontCamera
                if (iN.isMirror) {
                    flipH = !flipH
                }
                glFrameBuffer.setFlipH(flipH)
            } else {
                // Without a texture matrix the flip logic is inverted.
                var flipH = !iN.isFrontCamera
                if (iN.isMirror) {
                    flipH = !flipH
                }
                glFrameBuffer.setFlipH(flipH)
            }
            glFrameBuffer.setFlipV(iN.flipV)
            glFrameBuffer.process(iN.textureId, iN.textureType)
            out.index = cacheIndex
            out.tag = iN.tag
            textureIdQueue.offer(out)
            if (loggable) {
                Log.d(TAG, "TextureIdQueue enqueue index=$cacheIndex, size=$size")
            }
            cacheIndex = (cacheIndex + 1) % cacheCount
            size++
        } else {
            if (loggable) {
                Log.e(TAG, "TextureIdQueue is full!!")
            }
        }

        return size
    }

    /**
     * Returns the oldest processed frame, or null when the queue is empty.
     *
     * @param remove when true the frame is removed from the queue; when false
     *               it is only peeked (the same frame will be returned again).
     */
    fun dequeue(remove: Boolean = true): TextureOut? {
        val size = textureIdQueue.size
        val poll = if (remove) {
            textureIdQueue.poll()
        } else {
            textureIdQueue.peek()
        }
        if (loggable) {
            Log.d(TAG, "TextureIdQueue dequeue index=${poll?.index}, size=$size")
        }
        return poll
    }

    /** Clears pending frames and rewinds the ring; cached textures are kept. */
    fun reset() {
        cacheIndex = 0
        textureIdQueue.clear()
    }

    /** Deletes all cached GL textures and releases the frame buffer. */
    fun release() {
        cacheIndex = 0
        cacheTextureOuts.forEachIndexed { index, textureOut ->
            if (textureOut != null) {
                GLES20.glDeleteTextures(1, intArrayOf(textureOut.textureId), 0)
                cacheTextureOuts[index] = null
            }
        }
        textureIdQueue.clear()
        glFrameBuffer.release()
    }

    /**
     * Descriptor of an input frame.
     *
     * @param rotation clockwise rotation in degrees (0/90/180/270).
     * @param transform optional 4x4 texture matrix; null selects the
     *                  matrix-free flip path in [enqueue].
     * @param tag opaque caller data carried through to [TextureOut.tag].
     */
    data class TextureIn(
        val textureId: Int,
        val textureType: Int,
        val width: Int,
        val height: Int,
        val rotation: Int,
        val flipV: Boolean,
        val isFrontCamera: Boolean,
        val isMirror: Boolean,
        val transform: FloatArray?,
        val tag: Any? = null
    )

    /**
     * A processed frame backed by a cached GL_TEXTURE_2D texture.
     * [index] is the ring slot the frame was rendered into.
     */
    data class TextureOut(
        var index: Int = 0,
        val textureId: Int,
        val textureType: Int,
        val width: Int,
        val height: Int,
        val isFrontCamera: Boolean,
        val isMirror: Boolean,
        var tag: Any? = null
    )
}
@@ -0,0 +1,279 @@
/*
 * MIT License
 *
 * Copyright (c) 2023 Agora Community
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package io.agora.beautyapi.faceunity.utils.egl;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.IntBuffer;
import java.util.Objects;

import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLContext;

import io.agora.beautyapi.faceunity.utils.LogUtils;

/**
 * Static OpenGL ES 2.0 helpers: texture read-back to {@link Bitmap},
 * NV21-to-Bitmap decoding, MVP transform construction, shader/program
 * compilation and GL error checking. Must be called on a thread with a
 * current GL context.
 */
public class GLUtils {
    private static final String TAG = "GLUtils";
    public static final float[] IDENTITY_MATRIX = new float[16];

    static {
        Matrix.setIdentityM(IDENTITY_MATRIX, 0);
    }

    private GLUtils() {
    }

    /**
     * Reads a GL_TEXTURE_2D texture back into an ARGB_8888 Bitmap by attaching
     * it to a temporary framebuffer and calling glReadPixels.
     *
     * @return the bitmap, or null when read-back fails.
     */
    public static Bitmap getTexture2DImage(int textureID, int width, int height) {
        try {
            // Remember the currently bound FBO so it can be restored afterwards.
            int[] oldFboId = new int[1];
            GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, IntBuffer.wrap(oldFboId));

            int[] framebuffers = new int[1];
            GLES20.glGenFramebuffers(1, framebuffers, 0);
            int framebufferId = framebuffers[0];
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebufferId);

            int[] renderbuffers = new int[1];
            GLES20.glGenRenderbuffers(1, renderbuffers, 0);
            int renderId = renderbuffers[0];
            GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, renderId);
            GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, height);

            GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureID, 0);
            GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, GLES20.GL_RENDERBUFFER, renderId);
            if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE) {
                LogUtils.e(TAG, "Framebuffer error");
            }

            ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4);
            rgbaBuf.position(0);
            GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf);

            Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
            bitmap.copyPixelsFromBuffer(rgbaBuf);

            // BUGFIX: previously glDeleteRenderbuffers was handed the framebuffer
            // id array and glDeleteFramebuffers an IntBuffer.allocate(framebufferId)
            // full of zeros, leaking both objects. Delete each with its own id.
            GLES20.glDeleteRenderbuffers(1, IntBuffer.wrap(renderbuffers));
            GLES20.glDeleteFramebuffers(1, IntBuffer.wrap(framebuffers));

            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, oldFboId[0]);

            return bitmap;
        } catch (Exception e) {
            LogUtils.e(TAG, e.toString());
        }
        return null;
    }

    /**
     * Reads a GL_TEXTURE_EXTERNAL_OES texture back into an ARGB_8888 Bitmap.
     * Same mechanism as {@link #getTexture2DImage} but attaches the OES target.
     *
     * @return the bitmap, or null when read-back fails.
     */
    public static Bitmap getTextureOESImage(int textureID, int width, int height) {
        try {
            int[] oldFboId = new int[1];
            GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, IntBuffer.wrap(oldFboId));

            int[] framebuffers = new int[1];
            GLES20.glGenFramebuffers(1, framebuffers, 0);
            int framebufferId = framebuffers[0];
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, framebufferId);

            int[] renderbuffers = new int[1];
            GLES20.glGenRenderbuffers(1, renderbuffers, 0);
            int renderId = renderbuffers[0];
            GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, renderId);
            GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, height);

            GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID, 0);
            GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, GLES20.GL_RENDERBUFFER, renderId);
            if (GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER) != GLES20.GL_FRAMEBUFFER_COMPLETE) {
                LogUtils.e(TAG, "Framebuffer error");
            }

            ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4);
            rgbaBuf.position(0);
            GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf);

            Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
            bitmap.copyPixelsFromBuffer(rgbaBuf);

            // BUGFIX: delete the renderbuffer and framebuffer with their own ids
            // (the original wrapped the wrong array / allocated a zeroed buffer).
            GLES20.glDeleteRenderbuffers(1, IntBuffer.wrap(renderbuffers));
            GLES20.glDeleteFramebuffers(1, IntBuffer.wrap(framebuffers));

            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, oldFboId[0]);

            return bitmap;
        } catch (Exception e) {
            LogUtils.e(TAG, e.toString());
        }
        return null;
    }

    /**
     * Decodes an NV21 byte array to a Bitmap by compressing through JPEG
     * (quality 80) — lossy, intended for debugging/snapshots.
     *
     * @return the decoded bitmap, or null on I/O failure.
     */
    public static Bitmap nv21ToBitmap(byte[] nv21, int width, int height) {
        Bitmap bitmap = null;
        try {
            YuvImage image = new YuvImage(nv21, ImageFormat.NV21, width, height, null);
            ByteArrayOutputStream stream = new ByteArrayOutputStream();
            image.compressToJpeg(new Rect(0, 0, width, height), 80, stream);
            bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
            stream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return bitmap;
    }

    /** Reads the currently bound framebuffer's pixels into an ARGB_8888 Bitmap. */
    private static Bitmap readBitmap(int width, int height) {
        ByteBuffer rgbaBuf = ByteBuffer.allocateDirect(width * height * 4);
        rgbaBuf.position(0);
        GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaBuf);

        Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        bitmap.copyPixelsFromBuffer(rgbaBuf);
        return bitmap;
    }

    /**
     * Builds a 4x4 MVP matrix applying horizontal/vertical flips and a rotation.
     * For 90/270-degree rotations the flip axes are swapped, and the rotation
     * direction is negated when exactly one flip is active so the visual result
     * matches the un-flipped case.
     */
    public static float[] createTransformMatrix(int rotation, boolean flipH, boolean flipV) {
        float[] renderMVPMatrix = new float[16];
        float[] tmp = new float[16];
        Matrix.setIdentityM(tmp, 0);

        boolean _flipH = flipH;
        boolean _flipV = flipV;
        if (rotation % 180 != 0) {
            // A 90/270 rotation swaps which axis each flip acts on.
            _flipH = flipV;
            _flipV = flipH;
        }

        if (_flipH) {
            Matrix.rotateM(tmp, 0, tmp, 0, 180, 0, 1f, 0);
        }
        if (_flipV) {
            Matrix.rotateM(tmp, 0, tmp, 0, 180, 1f, 0f, 0);
        }

        float _rotation = rotation;
        if (_rotation != 0) {
            if (_flipH != _flipV) {
                // A single flip inverts handedness, so rotate the other way.
                _rotation *= -1;
            }
            Matrix.rotateM(tmp, 0, tmp, 0, _rotation, 0, 0, 1);
        }

        Matrix.setIdentityM(renderMVPMatrix, 0);
        Matrix.multiplyMM(renderMVPMatrix, 0, tmp, 0, renderMVPMatrix, 0);
        return renderMVPMatrix;
    }

    /** Returns the calling thread's current EGL10 context, or null if none. */
    public static EGLContext getCurrGLContext() {
        EGL10 egl = (EGL10) javax.microedition.khronos.egl.EGLContext.getEGL();
        if (egl != null && !Objects.equals(egl.eglGetCurrentContext(), EGL10.EGL_NO_CONTEXT)) {
            return egl.eglGetCurrentContext();
        }
        return null;
    }

    /**
     * Logs and throws a RuntimeException if the GL error flag is set.
     * Note: only checks a single error; nested errors require repeated calls.
     */
    public static void checkGlError(String op) {
        int error = GLES20.glGetError();
        if (error != GLES20.GL_NO_ERROR) {
            String msg = op + ": glError 0x" + Integer.toHexString(error);
            LogUtils.e(TAG, msg);
            throw new RuntimeException(msg);
        }
    }

    /**
     * Compiles and links a vertex + fragment shader pair.
     *
     * @return the program handle, or 0 on compile/link failure (errors logged).
     */
    public static int createProgram(String vertexSource, String fragmentSource) {
        int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
        if (vertexShader == 0) {
            return 0;
        }
        int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
        if (pixelShader == 0) {
            return 0;
        }
        int program = GLES20.glCreateProgram();
        checkGlError("glCreateProgram");
        if (program == 0) {
            LogUtils.e(TAG, "Could not create program");
        }
        GLES20.glAttachShader(program, vertexShader);
        checkGlError("glAttachShader");
        GLES20.glAttachShader(program, pixelShader);
        checkGlError("glAttachShader");
        GLES20.glLinkProgram(program);
        int[] linkStatus = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
        if (linkStatus[0] != GLES20.GL_TRUE) {
            LogUtils.e(TAG, "Could not link program: ");
            LogUtils.e(TAG, GLES20.glGetProgramInfoLog(program));
            GLES20.glDeleteProgram(program);
            program = 0;
        }
        return program;
    }

    /**
     * Compiles a single shader.
     *
     * @return the shader handle, or 0 on compile failure (errors logged).
     */
    public static int loadShader(int shaderType, String source) {
        int shader = GLES20.glCreateShader(shaderType);
        checkGlError("glCreateShader type=" + shaderType);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            LogUtils.e(TAG, "Could not compile shader " + shaderType + ":");
            LogUtils.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            shader = 0;
        }
        return shader;
    }

    /**
     * Creates and configures a GL texture with the given filters/wrap modes,
     * optionally uploading a bitmap (always via the GL_TEXTURE_2D target).
     *
     * @return the new texture handle.
     */
    public static int createTexture(int textureTarget, Bitmap bitmap, int minFilter,
                                    int magFilter, int wrapS, int wrapT) {
        int[] textureHandle = new int[1];

        GLES20.glGenTextures(1, textureHandle, 0);
        checkGlError("glGenTextures");
        GLES20.glBindTexture(textureTarget, textureHandle[0]);
        checkGlError("glBindTexture " + textureHandle[0]);
        GLES20.glTexParameterf(textureTarget, GLES20.GL_TEXTURE_MIN_FILTER, minFilter);
        GLES20.glTexParameterf(textureTarget, GLES20.GL_TEXTURE_MAG_FILTER, magFilter); // linear interpolation
        GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_S, wrapS);
        GLES20.glTexParameteri(textureTarget, GLES20.GL_TEXTURE_WRAP_T, wrapT);

        if (bitmap != null) {
            android.opengl.GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
        }

        checkGlError("glTexParameter");
        return textureHandle[0];
    }
}
@@ -0,0 +1,214 @@
/*
 * MIT License
 *
 * Copyright (c) 2023 Agora Community
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package io.agora.beautyapi.faceunity.utils.egl

import android.opengl.GLES20
import io.agora.beautyapi.faceunity.utils.LogUtils
import java.util.concurrent.Callable
import java.util.concurrent.ConcurrentLinkedQueue
import java.util.concurrent.CountDownLatch
import java.util.concurrent.Executors
import java.util.concurrent.Future
import javax.microedition.khronos.egl.EGLContext

/**
 * Pipelines texture filtering through a dedicated single-threaded GL worker.
 *
 * [process] copies the caller's texture into an input queue, submits a filter
 * task to the worker (which runs [filter] on its own EGL context shared with
 * the caller's), and — once [cacheCount] frames are in flight — drains one
 * finished frame back onto the caller's GL context per call.
 */
class TextureProcessHelper(
    private val cacheCount: Int = 2
) {
    private val TAG = "TextureProcessHelper"
    private val glTextureBufferQueueIn = GLTextureBufferQueue(cacheCount = cacheCount, loggable = true)
    private val glTextureBufferQueueOut = GLTextureBufferQueue(cacheCount = cacheCount, loggable = false)
    private val glFrameBuffer = GLFrameBuffer()
    private val futureQueue = ConcurrentLinkedQueue<Future<Int>>()
    private val workerThread = Executors.newSingleThreadExecutor()
    private val eglContextHelper = EGLContextHelper()
    private var eglContextBase: EGLContext? = null
    private var isReleased = false
    private var filter: ((GLTextureBufferQueue.TextureOut) -> Int)? = null
    private var isBegin = false
    private var frameIndex = 0

    /** Installs the filter invoked on the worker thread; it returns a texture id, or a negative value to pass through. */
    fun setFilter(filter: (GLTextureBufferQueue.TextureOut) -> Int) {
        this.filter = filter
    }

    /**
     * Feeds one frame into the pipeline and, once primed, renders one finished
     * frame back onto the caller's context.
     *
     * @return the processed texture id from [glFrameBuffer], 0 while the
     *         pipeline is still filling, or -1 on failure/after release.
     */
    fun process(
        texId: Int, texType: Int,
        width: Int, height: Int, rotation: Int,
        transform: FloatArray,
        isFrontCamera: Boolean,
        isMirror: Boolean
    ): Int {
        if (isReleased) {
            return -1
        }
        val callerContext = GLUtils.getCurrGLContext() ?: return -1

        // (Re)create the worker's EGL context whenever the caller's shared
        // context appears for the first time or has changed.
        if (eglContextBase != callerContext) {
            val needsTeardown = eglContextBase != null
            eglContextBase = callerContext
            executeSync {
                if (needsTeardown) {
                    eglContextHelper.release()
                }
                eglContextHelper.initEGL(eglContextBase)
                eglContextHelper.eglMakeCurrent()
            }
        }

        glTextureBufferQueueIn.enqueue(
            GLTextureBufferQueue.TextureIn(
                texId,
                texType,
                width,
                height,
                rotation,
                false,
                isFrontCamera,
                isMirror,
                transform,
                frameIndex
            )
        )
        frameIndex++

        if (isReleased) {
            return -1
        }

        futureQueue.offer(workerThread.submit(Callable {
            if (isReleased) {
                return@Callable -2
            }

            // Peek (don't remove yet) so the input texture stays valid while
            // the filter reads from it.
            val pending = glTextureBufferQueueIn.dequeue(false) ?: return@Callable -2
            val filtered = filter?.invoke(pending) ?: -1
            val (outTexId, outTexType) = if (filtered >= 0) {
                filtered to GLES20.GL_TEXTURE_2D
            } else {
                // No filter result: pass the input texture straight through.
                pending.textureId to pending.textureType
            }
            glTextureBufferQueueOut.enqueue(
                GLTextureBufferQueue.TextureIn(
                    outTexId,
                    outTexType,
                    pending.width,
                    pending.height,
                    0,
                    false,
                    false,
                    true,
                    null,
                    pending.tag
                )
            )
            glTextureBufferQueueIn.dequeue(true)
            return@Callable 0
        }))

        var ret = 0
        // Start draining only after the pipeline holds cacheCount frames.
        if (isBegin || futureQueue.size >= cacheCount) {
            isBegin = true
            try {
                val taskResult = futureQueue.poll()?.get() ?: -1
                if (taskResult == 0) {
                    val ready = glTextureBufferQueueOut.dequeue() ?: return -1
                    glFrameBuffer.setSize(ready.width, ready.height)
                    ret = glFrameBuffer.process(ready.textureId, ready.textureType)
                }
            } catch (e: Exception) {
                LogUtils.e(TAG, "process end with exception: $e")
            }
        }

        return ret
    }

    /** Cancels in-flight tasks and clears both queues; a no-op before the first frame. */
    fun reset() {
        if (frameIndex == 0) {
            return
        }
        isBegin = false
        frameIndex = 0
        cancelPendingFutures()
        glTextureBufferQueueIn.reset()
        executeSync {
            glTextureBufferQueueOut.reset()
        }
    }

    /** Number of filter tasks currently in flight. */
    fun size() = futureQueue.size

    /** Tears down queues, the worker's EGL context and the executor; the helper is unusable afterwards. */
    fun release() {
        isReleased = true
        filter = null
        isBegin = false
        frameIndex = 0
        cancelPendingFutures()
        glTextureBufferQueueIn.release()
        glFrameBuffer.release()
        executeSync {
            glTextureBufferQueueOut.release()
            if (eglContextBase != null) {
                eglContextHelper.release()
                eglContextBase = null
            }
        }
        workerThread.shutdown()
    }

    /** Runs [run] on the worker thread and blocks the caller until it completes. */
    fun executeSync(run: () -> Unit) {
        val latch = CountDownLatch(1)
        workerThread.execute {
            run.invoke()
            latch.countDown()
        }
        latch.await()
    }

    /** Drains [futureQueue], interrupting each still-running task. */
    private fun cancelPendingFutures() {
        var pending = futureQueue.poll()
        while (pending != null) {
            pending.cancel(true)
            pending = futureQueue.poll()
        }
    }
}
|
||||
Reference in New Issue
Block a user