Yutousama 2022-03-11 08:43:32 +08:00
commit 8f9ae202db
148 changed files with 23318 additions and 0 deletions

app/.gitignore vendored Normal file

@ -0,0 +1 @@
/build

app/build.gradle Normal file

@ -0,0 +1,106 @@
import java.text.SimpleDateFormat
plugins {
id 'com.android.application'
id 'kotlin-android'
}
android {
signingConfigs {
release {
storeFile file('D:\\AndroidKeys\\yutou.jks')
storePassword '34864394'
keyPassword '34864394'
keyAlias 'yutou'
}
}
compileSdk 31
defaultConfig {
applicationId "com.yutou.doormanager"
minSdk 21
targetSdk 30
versionCode 1
versionName "1.3.1-" + new SimpleDateFormat("MMddhhmmss").format(new Date())
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
ndk {
abiFilters "armeabi-v7a"
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
kotlinOptions {
jvmTarget = '1.8'
}
buildFeatures {
viewBinding true
}
}
dependencies {
implementation 'androidx.core:core-ktx:1.3.2'
implementation 'androidx.appcompat:appcompat:1.3.1'
implementation 'com.google.android.material:material:1.4.0'
implementation 'androidx.constraintlayout:constraintlayout:2.1.1'
implementation 'androidx.lifecycle:lifecycle-extensions:2.2.0'
implementation project(path: ':libWSLive')
testImplementation 'junit:junit:4.+'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
def camerax_version = "1.1.0-beta01"
implementation "androidx.camera:camera-core:${camerax_version}"
implementation "androidx.camera:camera-camera2:${camerax_version}"
implementation "androidx.camera:camera-lifecycle:${camerax_version}"
implementation "androidx.camera:camera-video:${camerax_version}"
implementation "androidx.camera:camera-view:${camerax_version}"
implementation "androidx.camera:camera-extensions:${camerax_version}"
implementation("com.squareup.okhttp3:okhttp:4.9.3")
implementation 'jp.co.cyberagent.android.gpuimage:gpuimage-library:1.4.1'
}
task buildApk(dependsOn: "assembleRelease") {
doFirst {
println 'Start packaging'
}
//dependsOn("assembleRelease")
doLast {
def files = new File("X:\\servier\\tools\\web\\apk\\door\\").listFiles()
for (File file : files) {
println file.name
file.delete()
}
def keystore = file("D:\\AndroidKeys\\yutou.jks")
def unsignedApk = file("build\\intermediates\\apk\\release\\app-release-unsigned.apk")
def signedApk = file("X:\\servier\\tools\\web\\apk\\door\\app-release-" + android.defaultConfig.versionName + ".apk")
def cmd = "jarsigner -verbose -keystore " + keystore + " -signedjar " + signedApk + " " + unsignedApk + " yutou -storepass 34864394"
println cmd
def pr = Runtime.getRuntime().exec("cmd /c ${cmd}")
def input = new BufferedReader(new InputStreamReader(pr.getInputStream(), "GBK"))
String line;
while ((line = input.readLine()) != null) {
println line
}
assert pr.waitFor() == 0
}
}

app/proguard-rules.pro vendored Normal file

@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile


@ -0,0 +1,24 @@
package com.yutou.doormanager
import androidx.test.platform.app.InstrumentationRegistry
import androidx.test.ext.junit.runners.AndroidJUnit4
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.Assert.*
/**
* Instrumented test, which will execute on an Android device.
*
* See [testing documentation](http://d.android.com/tools/testing).
*/
@RunWith(AndroidJUnit4::class)
class ExampleInstrumentedTest {
@Test
fun useAppContext() {
// Context of the app under test.
val appContext = InstrumentationRegistry.getInstrumentation().targetContext
assertEquals("com.yutou.doormanager", appContext.packageName)
}
}


@ -0,0 +1,103 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.yutou.doormanager">
<uses-feature android:name="android.hardware.camera.any" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.REQUEST_INSTALL_PACKAGES" />
<uses-permission android:name="android.permission.RECEIVE_SMS" />
<uses-permission android:name="android.permission.SEND_SMS" />
<uses-permission android:name="android.permission.WRITE_SMS" />
<uses-permission android:name="android.permission.READ_SMS" />
<uses-permission
android:name="android.permission.WRITE_EXTERNAL_STORAGE"
android:maxSdkVersion="28" />
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/Theme.Design.NoActionBar"
android:usesCleartextTraffic="true">
<meta-data
android:name="com.google.android.actions"
android:resource="@xml/provider_paths" />
<activity
android:name=".MainActivity"
android:exported="true"
android:screenOrientation="fullSensor">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<activity android:name=".SMSActivity">
<intent-filter>
<action android:name="android.intent.action.SEND"/>
<action android:name="android.intent.action.SENDTO"/>
<category android:name="android.intent.category.DEFAULT"/>
<category android:name="android.intent.category.BROWSABLE"/>
<data android:scheme="sms"/>
<data android:scheme="smsto"/>
<data android:scheme="mms"/>
<data android:scheme="mmsto"/>
</intent-filter>
</activity>
<provider
android:name="androidx.core.content.FileProvider"
android:authorities="${applicationId}.fileprovider"
android:exported="false"
android:grantUriPermissions="true">
<meta-data
android:name="android.support.FILE_PROVIDER_PATHS"
android:resource="@xml/provider_paths" />
</provider>
<!-- BroadcastReceiver that listens for incoming SMS messages -->
<receiver
android:name=".SmsReceiver"
android:permission="android.permission.BROADCAST_SMS">
<intent-filter android:priority="2147483647">
<action android:name="android.provider.Telephony.SMS_RECEIVED" />
<action android:name="android.provider.Telephony.SMS_DELIVER" />
</intent-filter>
</receiver>
<!-- BroadcastReceiver that listens for incoming MMS messages -->
<receiver
android:name=".MmsReceiver"
android:permission="android.permission.BROADCAST_WAP_PUSH">
<intent-filter>
<action android:name="android.provider.Telephony.WAP_PUSH_DELIVER"/>
<data android:mimeType="application/vnd.wap.mms-message"/>
</intent-filter>
</receiver>
<!-- Service that delivers messages from the phone "quick response" -->
<service
android:name=".SmsSendService"
android:exported="true"
android:permission="android.permission.SEND_RESPOND_VIA_MESSAGE">
<intent-filter>
<action android:name="android.intent.action.RESPOND_VIA_MESSAGE" />
<category android:name="android.intent.category.DEFAULT" />
<data android:scheme="sms" />
<data android:scheme="smsto" />
<data android:scheme="mms" />
<data android:scheme="mmsto" />
</intent-filter>
</service>
</application>
</manifest>
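
The SENDTO activity, SMS_DELIVER/WAP_PUSH_DELIVER receivers, and RESPOND_VIA_MESSAGE service above are exactly the four components Android checks in a default-SMS-app candidate. MainActivity requests that status with Telephony.Sms.Intents.ACTION_CHANGE_DEFAULT; on Android 10+ the supported path is RoleManager. A minimal sketch (helper name and requestCode are hypothetical):

    import android.app.Activity
    import android.app.role.RoleManager
    import android.os.Build

    fun requestDefaultSmsRole(activity: Activity) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
            val roleManager = activity.getSystemService(RoleManager::class.java)
            if (roleManager.isRoleAvailable(RoleManager.ROLE_SMS) &&
                !roleManager.isRoleHeld(RoleManager.ROLE_SMS)
            ) {
                // Shows the system dialog asking the user to make this app the default SMS app.
                activity.startActivityForResult(
                    roleManager.createRequestRoleIntent(RoleManager.ROLE_SMS), 42
                )
            }
        }
    }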


@ -0,0 +1,15 @@
package com.yutou.doormanager;
import android.app.Service;
import android.content.Intent;
import android.os.IBinder;
import androidx.annotation.Nullable;
public class HeadlessSmsSendService extends Service {
@Nullable
@Override
public IBinder onBind(Intent intent) {
return null;
}
}


@ -0,0 +1,263 @@
package com.yutou.doormanager
import android.Manifest
import android.content.Intent
import android.content.IntentFilter
import android.graphics.Rect
import android.net.Uri
import android.os.Build
import android.os.Bundle
import android.os.Handler
import android.os.Looper
import android.provider.Telephony
import android.widget.Button
import android.widget.Toast
import androidx.appcompat.app.AlertDialog
import androidx.appcompat.app.AppCompatActivity
import androidx.core.app.ActivityCompat
import androidx.core.content.FileProvider
import com.yutou.doormanager.utils.*
import jp.co.cyberagent.android.gpuimage.GPUImageAddBlendFilter
import me.lake.librestreaming.core.listener.RESConnectionListener
import me.lake.librestreaming.filter.hardvideofilter.BaseHardVideoFilter
import me.lake.librestreaming.filter.hardvideofilter.HardVideoGroupFilter
import me.lake.librestreaming.ws.StreamAVOption
import me.lake.librestreaming.ws.StreamLiveCameraView
import me.lake.librestreaming.ws.filter.hardfilter.WatermarkFilter
import me.lake.librestreaming.ws.filter.hardfilter.extra.GPUImageCompatibleFilter
import org.json.JSONObject
import java.io.File
import java.util.*
class MainActivity : AppCompatActivity() {
private lateinit var button_up: Button
private lateinit var button_down: Button
private lateinit var button_screen: Button
private lateinit var button_open: Button
private lateinit var button_af: Button
private lateinit var mLiveCameraView: StreamLiveCameraView
private var zoom = 0.0f
private var isUpdate = true
lateinit var myDataListener: MyDataListener
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
ActivityCompat.requestPermissions(
this, arrayOf(
Manifest.permission.WRITE_EXTERNAL_STORAGE,
Manifest.permission.CAMERA,
Manifest.permission.RECORD_AUDIO,
Manifest.permission.SEND_SMS
), 0
)
initView()
startRTMP()
myDataListener = MyDataListener()
DataUtils.addListener(myDataListener)
val intent = Intent(Telephony.Sms.Intents.ACTION_CHANGE_DEFAULT)
intent.putExtra(
Telephony.Sms.Intents.EXTRA_PACKAGE_NAME,
packageName
)
startActivity(intent)
// Despite its name, SmsReceiver also handles ACTION_BATTERY_CHANGED (see its onReceive()).
val batteryLevelRcvr = SmsReceiver()
val batteryLevelFilter = IntentFilter(Intent.ACTION_BATTERY_CHANGED)
registerReceiver(batteryLevelRcvr, batteryLevelFilter)
Toast.makeText(this,"version = ${ packageManager.getPackageInfo(packageName, 0).versionName}",Toast.LENGTH_LONG).show()
}
private fun initView() {
button_up = findViewById(R.id.up)
button_down = findViewById(R.id.down)
button_screen = findViewById(R.id.screen)
button_open = findViewById(R.id.openPC)
button_af = findViewById(R.id.af)
button_up.setOnClickListener {
zoom += 0.1f
if (zoom > 1.0f) {
zoom = 1.0f
}
mLiveCameraView.setZoomByPercent(zoom)
uploadZoom(zoom)
}
button_down.setOnClickListener {
zoom -= 0.1f
if (zoom < 0f) {
zoom = 0.0f
}
mLiveCameraView.setZoomByPercent(zoom)
uploadZoom(zoom)
}
button_screen.setOnClickListener {
mLiveCameraView.takeScreenShot {
Utils.upload(it, this)
Handler(Looper.getMainLooper()).post {
Toast.makeText(this@MainActivity, "已拍照", Toast.LENGTH_LONG).show()
}
}
}
button_open.setOnClickListener {
val json = JSONObject()
json.put("type", "nas")
HttpUtils.post(
"http://192.168.31.88:8000/tools/openpc.do",
json,
null,
object : HttpListener {
override fun data(code: Int, data: String) {
Handler(Looper.getMainLooper()).post {
Toast.makeText(this@MainActivity, "开机成功:${data}", Toast.LENGTH_LONG)
.show()
}
}
})
}
button_af.setOnClickListener {
mLiveCameraView.setCreamAr()
}
}
private fun uploadZoom(zoom: Float) {
val json = JSONObject()
json.put("zoom", zoom)
HttpUtils.post("http://192.168.31.88:8000/door/zoom.do", json, null, object : HttpListener {
override fun data(code: Int, data: String) {
if (data == "1") {
Handler(Looper.getMainLooper()).post {
Toast.makeText(this@MainActivity, "设置成功:${zoom}", Toast.LENGTH_LONG).show()
}
}
}
})
}
lateinit var watermark: WatermarkFilter
private fun startRTMP() {
mLiveCameraView = findViewById(R.id.stream_previewView)
mLiveCameraView.setActivity(this)
val option = StreamAVOption().apply {
streamUrl = "rtmp://192.168.31.88/live"
}
mLiveCameraView.let {
it.init(this, option)
it.addStreamStateListener(object : RESConnectionListener {
override fun onOpenConnectionResult(result: Int) {
mLiveCameraView.setCreamAr()
}
override fun onWriteError(errno: Int) {
}
override fun onCloseConnectionResult(result: Int) {
}
})
val files: LinkedList<BaseHardVideoFilter> = LinkedList()
watermark =
WatermarkFilter(
WatermarkFilter.fromText(32f, Utils.getTime()),
Rect(50, 50, 800, 150)
)
// it.setMirror(true, true, true)
files.add(GPUImageCompatibleFilter(GPUImageAddBlendFilter()))
files.add(watermark)
it.setHardVideoFilter(HardVideoGroupFilter(files))
it.startStreaming(option.streamUrl)
// Watchdog: refresh the timestamp watermark every second and restart the
// stream when it has stopped or the send rate drops to zero.
Timer().schedule(object : TimerTask() {
override fun run() {
watermark.updateText(32f, Utils.getTime() + " push rate: ${mLiveCameraView.avSpeed}")
if (!mLiveCameraView.isStreaming || mLiveCameraView.avSpeed == 0) {
mLiveCameraView.stopStreaming()
mLiveCameraView.startStreaming("rtmp://192.168.31.88/live")
}
}
}, 0, 1000)
}
}
override fun onSaveInstanceState(outState: Bundle) {
super.onSaveInstanceState(outState)
}
override fun onDestroy() {
super.onDestroy()
mLiveCameraView.destroy()
DataUtils.removeListener(myDataListener)
}
inner class MyDataListener : DataListener {
override fun out(json: JSONObject) {
Handler(Looper.getMainLooper()).post {
zoom = json.getDouble("zoom").toFloat()
mLiveCameraView.setZoomByPercent(zoom)
if (json.getString("restart").equals("1")) {
val intent = packageManager.getLaunchIntentForPackage(packageName);
intent!!.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(intent)
android.os.Process.killProcess(android.os.Process.myPid());
}
if (!json.getString("audio").equals("null"))
AudioUtils().play(json.getString("audio"))
if (!json.isNull("update")) {
val versionName = packageManager.getPackageInfo(packageName, 0).versionName
if (json.getJSONObject("update").getString("versionName") != versionName) {
val url = json.getJSONObject("update").getString("url")
if (isUpdate) {
isUpdate = false
update(url, json.getJSONObject("update").getString("versionName"))
}
}
}
if(!json.isNull("af")){
mLiveCameraView.setCreamAr()
}
}
}
}
private fun update(url: String, version: String) {
val dialog = AlertDialog.Builder(this).apply {
val appVersionName = packageManager.getPackageInfo(packageName, 0).versionName
setTitle("检测到版本更新")
setMessage("当前版本号:${appVersionName}\n更新版本号:${version}\n是否更新?")
setPositiveButton("更新") { _, _ ->
HttpUtils.download(
url,
filesDir.absolutePath + File.separator + "download.apk",
object : HttpListener {
override fun data(code: Int, data: String) {
val intent = Intent(Intent.ACTION_VIEW)
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)
var apkUri = Uri.fromFile(File(data))
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
intent.addFlags(Intent.FLAG_GRANT_READ_URI_PERMISSION)
apkUri = FileProvider.getUriForFile(
this@MainActivity, "$packageName.fileprovider",
File(data)
)
}
intent.setDataAndType(apkUri, "application/vnd.android.package-archive")
startActivity(intent)
}
});
}
setNegativeButton("Cancel") { p0, _ -> p0?.cancel() }
}
dialog.show()
}
}


@ -0,0 +1,12 @@
package com.yutou.doormanager;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
public class MmsReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
}
}


@ -0,0 +1,46 @@
package com.yutou.doormanager
import android.app.Activity
import android.app.Application
import android.os.Bundle
import com.yutou.doormanager.utils.DataUtils
import java.lang.ref.WeakReference
class MyApplication: Application() {
companion object{
lateinit var activitys:WeakReference<Activity>
}
override fun onCreate() {
super.onCreate()
registerActivityLifecycleCallbacks(object : ActivityLifecycleCallbacks {
override fun onActivityCreated(p0: Activity, p1: Bundle?) {
activitys=WeakReference(p0)
}
// No-op overrides: the IDE-generated TODO() stubs would throw
// NotImplementedError the first time any activity starts.
override fun onActivityStarted(p0: Activity) {}
override fun onActivityResumed(p0: Activity) {}
override fun onActivityPaused(p0: Activity) {}
override fun onActivityStopped(p0: Activity) {}
override fun onActivitySaveInstanceState(p0: Activity, p1: Bundle) {}
override fun onActivityDestroyed(p0: Activity) {}
})
}
}
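
With the callbacks above, MyApplication.activitys always points at the most recently created Activity without leaking it. A hypothetical consumer (helper name assumed; the lateinit property throws if no Activity has been created yet):

    // Run a block against the last-created Activity, if it has not been garbage-collected.
    fun withCurrentActivity(block: (Activity) -> Unit) {
        MyApplication.activitys.get()?.let(block)
    }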


@ -0,0 +1,12 @@
package com.yutou.doormanager
import androidx.lifecycle.ViewModel
class MyViewModel:ViewModel() {
var index:Int = 0
val data:String by lazy {
"data = "+index
}
}
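
Worth noting: data is computed exactly once, capturing whatever index holds at first access; later writes to index are invisible. A quick illustration of the pitfall:

    val vm = MyViewModel()
    vm.index = 5
    println(vm.data)  // prints "data = 5" — the lazy block runs here
    vm.index = 9
    println(vm.data)  // still prints "data = 5"; lazy caches the first result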


@ -0,0 +1,14 @@
package com.yutou.doormanager;
import android.app.Activity;
import android.os.Bundle;
import android.telephony.SmsManager;
import androidx.annotation.Nullable;
public class SMSActivity extends Activity {
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
}


@ -0,0 +1,44 @@
package com.yutou.doormanager;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.os.BatteryManager;
import com.yutou.doormanager.utils.SmsUtils;
public class SmsReceiver extends BroadcastReceiver {
private static boolean isLowPower = true;
private static boolean isPower = true;
public static int powerData=0;
private static int power=100;
@Override
public void onReceive(Context context, Intent intent) {
int status = intent.getIntExtra("status", -1);
int rawlevel = intent.getIntExtra("level", -1);
int scale = intent.getIntExtra("scale", -1);
int level = -1;
if(rawlevel >= 0 && scale > 0){
level = (rawlevel*100)/scale;
}
powerData=status;
if (status == BatteryManager.BATTERY_STATUS_DISCHARGING) {
isPower = true;
if (isLowPower) {
SmsUtils.Companion.sendSms("Monitor power disconnected", context);
isLowPower = false;
}
// While discharging, report the remaining charge by SMS at every 10% step.
if (level != power && level % 10 == 0) {
SmsUtils.Companion.sendSms("Monitor battery level: " + level, context);
power = level;
}
} else {
isLowPower = true;
if (isPower) {
SmsUtils.Companion.sendSms("Monitor power connected", context);
isPower = false;
}
}
}
}


@ -0,0 +1,22 @@
package com.yutou.doormanager;
import android.app.Service;
import android.content.Intent;
import android.os.Binder;
import android.os.IBinder;
import androidx.annotation.Nullable;
public class SmsSendService extends Service {
class MyBinder extends Binder {
public SmsSendService getService(){
return SmsSendService.this;
}
}
private MyBinder binder=new MyBinder();
@Nullable
@Override
public IBinder onBind(Intent intent) {
return binder;
}
}


@ -0,0 +1,8 @@
package com.yutou.doormanager
class TestData {
val str by lazy {
println("Init lazy")
"Hello World"
}
}
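
TestData makes lazy's evaluate-once contract observable through the println side effect:

    val t = TestData()
    println(t.str)  // prints "Init lazy", then "Hello World"
    println(t.str)  // prints only "Hello World"; the initializer does not run again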


@ -0,0 +1,21 @@
package com.yutou.doormanager.utils
import android.media.MediaPlayer
class AudioUtils() {
fun play(url: String) {
println("url = ${url}")
val player = MediaPlayer()
player.isLooping = false
player.setOnCompletionListener {
println(" --> play over")
it.release()
}
//setAudioStreamType(AudioManager.STREAM_MUSIC)
player.setDataSource(url)
player.prepare() // might take long! (for buffering, etc)
player.start()
}
}
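
Since prepare() can block on a network URL (as the comment warns), a non-blocking variant of the same logic would use prepareAsync() — a minimal sketch (helper name assumed):

    fun playAsync(url: String) {
        val player = MediaPlayer()
        player.isLooping = false
        player.setDataSource(url)
        player.setOnPreparedListener { it.start() }      // begins playback once buffered
        player.setOnCompletionListener { it.release() }  // free the player when finished
        player.prepareAsync()                            // returns immediately, buffers in background
    }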


@ -0,0 +1,7 @@
package com.yutou.doormanager.utils
import org.json.JSONObject
interface DataListener {
fun out(json:JSONObject)
}


@ -0,0 +1,50 @@
package com.yutou.doormanager.utils
import org.json.JSONObject
import java.util.*
import kotlin.collections.ArrayList
class DataUtils {
var listener= ArrayList<DataListener>()
companion object{
private var instance:DataUtils?=null
get() {
if(field==null){
field= DataUtils()
}
return field
}
@Synchronized
fun get():DataUtils{
return instance!!
}
fun addListener(_listener: DataListener){
instance!!.listener.add(_listener)
}
fun removeListener(_listener: DataListener){
instance!!.listener.remove(_listener)
}
}
init {
run()
}
// Poll the server once per second and forward each response to all listeners.
private fun run(){
Timer().schedule(object : TimerTask() {
override fun run() {
val json=JSONObject()
json.put("def","")
HttpUtils.post("http://192.168.31.88:8000/door/data.do",json,null,object :HttpListener{
override fun data(code: Int, data: String) {
if(code==200){
listener.forEach { it.out(JSONObject(data)) }
}
}
})
}
},0,1000)
}
}
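
The companion-object getter above hand-rolls lazy singleton initialization and is not thread-safe on first access. An object declaration expresses the same one-second polling hub with thread-safe initialization for free — a sketch under the same assumptions (same endpoint, same package as DataListener/HttpUtils/HttpListener; the DataHub name is hypothetical):

    import org.json.JSONObject
    import java.util.Timer
    import java.util.TimerTask

    object DataHub {
        private val listeners = mutableListOf<DataListener>()

        fun addListener(l: DataListener) = synchronized(listeners) { listeners.add(l) }
        fun removeListener(l: DataListener) = synchronized(listeners) { listeners.remove(l) }

        init {
            // Poll once per second and fan each response out to all listeners.
            Timer().schedule(object : TimerTask() {
                override fun run() {
                    HttpUtils.post("http://192.168.31.88:8000/door/data.do",
                        JSONObject().put("def", ""), null, object : HttpListener {
                            override fun data(code: Int, data: String) {
                                if (code == 200) synchronized(listeners) {
                                    listeners.forEach { it.out(JSONObject(data)) }
                                }
                            }
                        })
                }
            }, 0, 1000)
        }
    }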


@ -0,0 +1,5 @@
package com.yutou.doormanager.utils
interface HttpListener{
fun data(code:Int,data:String)
}


@ -0,0 +1,88 @@
package com.yutou.doormanager.utils
import okhttp3.*
import okhttp3.RequestBody.Companion.asRequestBody
import org.json.JSONObject
import java.io.File
import java.io.FileOutputStream
import java.io.IOException
class HttpUtils {
companion object {
fun uploadImage(body: JSONObject) {
post(
"http://192.168.31.88:802/qq/file.do",
body,
File(body.getString("filePath")),
object : HttpListener {
override fun data(code: Int, data: String) {
}
})
}
fun post(url: String, json: JSONObject, file: File?, listener: HttpListener) {
val client = OkHttpClient()
val builder = Request.Builder()
builder.url(url)
.post(
MultipartBody.Builder().run {
if (file != null) {
addFormDataPart(
"image",
json.getString("fileName"),
file.asRequestBody()
)
}
json.keys().forEach {
addFormDataPart(it, json.getString(it))
}
build()
}
)
client.newCall(builder.build()).enqueue(object : Callback {
override fun onFailure(call: Call, e: IOException) {
print("failure = ")
println(e)
}
override fun onResponse(call: Call, response: Response) {
listener.data(response.code, response.body!!.string())
}
})
}
fun download(url: String, filePath: String, listener: HttpListener) {
val client = OkHttpClient()
val builder = Request.Builder().url(url).build()
client.newCall(builder).enqueue(object :Callback{
override fun onFailure(call: Call, e: IOException) {
println(e)
}
override fun onResponse(call: Call, response: Response) {
if(response.isSuccessful){
if(File(filePath).exists()){
File(filePath).delete()
}
println("文件大小:"+response.body?.contentLength())
val inputStream=response.body?.byteStream()
val outputStream=FileOutputStream(filePath)
val bytes= ByteArray(1024)
var len=0
while ((inputStream?.read(bytes)).also { len=it!! }!=-1){
outputStream.write(bytes,0,len)
outputStream.flush()
}
outputStream.close()
inputStream?.close()
println("实际文件大小:"+File(filePath).length())
listener.data(0,filePath)
}
}
})
}
}
}
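
The manual buffer loop in download() is correct but easy to get wrong; Kotlin's use and copyTo express the same copy with automatic stream closing — an equivalent sketch (helper name is hypothetical):

    fun saveBody(response: Response, filePath: String): Long {
        val target = File(filePath)
        if (target.exists()) target.delete()
        var copied = 0L
        response.body!!.byteStream().use { input ->
            FileOutputStream(target).use { output ->
                copied = input.copyTo(output)  // both streams close even if this throws
            }
        }
        return copied
    }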


@ -0,0 +1,29 @@
package com.yutou.doormanager.utils
import android.app.PendingIntent
import android.content.Context
import android.content.Intent
import android.os.Build
import android.telephony.SmsManager
class SmsUtils {
companion object{
fun sendSms(message:String, context:Context){
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
var smsManager= context.getSystemService(SmsManager::class.java)
val sentIntent = Intent("SENT_SMS_ACTION")
val sentPI = PendingIntent.getBroadcast(context, 0, sentIntent, 0)
val deliverIntent = Intent("DELIVERED_SMS_ACTION")
val deliverPI = PendingIntent.getBroadcast(context, 0, deliverIntent, 0)
if(smsManager==null)
smsManager=SmsManager.getDefault()
println("smsManager = ${smsManager}")
smsManager.sendTextMessage("+8613687358829", null, message, sentPI, deliverPI)
} else {
return
}
}
}
}
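
PendingIntent.getBroadcast(context, 0, intent, 0) is accepted while targetSdk is 30, but apps targeting Android 12 (API 31) must declare mutability explicitly, so a future targetSdk bump would need something like this hedged sketch:

    // FLAG_IMMUTABLE exists since API 23 and is mandatory when targeting API 31+.
    val flags = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) PendingIntent.FLAG_IMMUTABLE else 0
    val sentPI = PendingIntent.getBroadcast(context, 0, sentIntent, flags)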


@ -0,0 +1,32 @@
package com.yutou.doormanager.utils
import android.content.Context
import android.graphics.Bitmap
import org.json.JSONObject
import java.io.File
import java.io.FileOutputStream
import java.text.SimpleDateFormat
import java.util.*
class Utils {
companion object {
fun upload(bitmap: Bitmap, context: Context) {
val file = File(context.cacheDir.absolutePath + File.separator + "tmp.png")
val out = FileOutputStream(file)
bitmap.compress(Bitmap.CompressFormat.PNG, 100, out)
out.apply {
flush()
close()
}
val json=JSONObject()
json.put("fileName",file.name)
json.put("filePath",file.absolutePath)
HttpUtils.uploadImage(json)
// file.deleteOnExit()
}
fun getTime():String{
return SimpleDateFormat("yyyy年MM月dd日 HH:mm:ss", Locale.CHINA).format(Date())
}
}
}


@ -0,0 +1,30 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>


@ -0,0 +1,170 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>

Binary file not shown (added; 3.9 KiB).


@ -0,0 +1,60 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<!-- <androidx.camera.view.PreviewView
android:id="@+id/viewFinder"
android:layout_width="match_parent"
android:layout_height="match_parent" />-->
<LinearLayout
android:id="@+id/linearLayout"
android:layout_width="wrap_content"
android:gravity="center"
android:layout_height="0dp"
android:orientation="vertical"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toStartOf="@+id/stream_previewView"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent">
<Button
android:id="@+id/up"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginBottom="10dp"
android:text="@string/button_up" />
<Button
android:id="@+id/down"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/button_down" />
<Button
android:id="@+id/screen"
android:layout_marginTop="10dp"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/button_screen" />
<Button
android:id="@+id/af"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/button_af" />
</LinearLayout>
<me.lake.librestreaming.ws.StreamLiveCameraView
android:id="@+id/stream_previewView"
android:layout_width="0dp"
android:layout_height="match_parent"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toEndOf="@+id/linearLayout"
app:layout_constraintTop_toTopOf="parent" />
</androidx.constraintlayout.widget.ConstraintLayout>


@ -0,0 +1,64 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<!-- <androidx.camera.view.PreviewView
android:id="@+id/viewFinder"
android:layout_width="match_parent"
android:layout_height="match_parent" />-->
<me.lake.librestreaming.ws.StreamLiveCameraView
android:id="@+id/stream_previewView"
android:layout_width="0dp"
android:layout_height="0dp"
android:layout_marginBottom="64dp"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
<LinearLayout
android:id="@+id/linearLayout"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:gravity="center"
android:orientation="horizontal"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintStart_toStartOf="parent">
<Button
android:id="@+id/up"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/button_up"/>
<Button
android:id="@+id/down"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/button_down" />
<Button
android:id="@+id/screen"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/button_screen" />
<Button
android:id="@+id/openPC"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/button_open" />
<Button
android:id="@+id/af"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/button_af" />
</LinearLayout>
</androidx.constraintlayout.widget.ConstraintLayout>


@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>


@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

Ten binary image files not shown (added; sizes 1.4 KiB, 2.8 KiB, 982 B, 1.7 KiB, 1.9 KiB, 3.8 KiB, 2.8 KiB, 5.8 KiB, 3.8 KiB, 7.6 KiB).


@ -0,0 +1,16 @@
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Theme.DoorManager" parent="Theme.MaterialComponents.DayNight.DarkActionBar">
<!-- Primary brand color. -->
<item name="colorPrimary">@color/purple_200</item>
<item name="colorPrimaryVariant">@color/purple_700</item>
<item name="colorOnPrimary">@color/black</item>
<!-- Secondary brand color. -->
<item name="colorSecondary">@color/teal_200</item>
<item name="colorSecondaryVariant">@color/teal_200</item>
<item name="colorOnSecondary">@color/black</item>
<!-- Status bar color. -->
<item name="android:statusBarColor" tools:targetApi="l">?attr/colorPrimaryVariant</item>
<!-- Customize your theme here. -->
</style>
</resources>


@ -0,0 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="purple_200">#FFBB86FC</color>
<color name="purple_500">#FF6200EE</color>
<color name="purple_700">#FF3700B3</color>
<color name="teal_200">#FF03DAC5</color>
<color name="teal_700">#FF018786</color>
<color name="black">#FF000000</color>
<color name="white">#FFFFFFFF</color>
</resources>


@ -0,0 +1,8 @@
<resources>
<string name="app_name">DoorManager</string>
<string name="button_up">+</string>
<string name="button_down">-</string>
<string name="button_screen">拍照</string>
<string name="button_open">开机</string>
<string name="button_af">对焦</string>
</resources>


@ -0,0 +1,16 @@
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Theme.DoorManager" parent="Theme.MaterialComponents.DayNight.DarkActionBar">
<!-- Primary brand color. -->
<item name="colorPrimary">@color/purple_500</item>
<item name="colorPrimaryVariant">@color/purple_700</item>
<item name="colorOnPrimary">@color/white</item>
<!-- Secondary brand color. -->
<item name="colorSecondary">@color/teal_200</item>
<item name="colorSecondaryVariant">@color/teal_700</item>
<item name="colorOnSecondary">@color/black</item>
<!-- Status bar color. -->
<item name="android:statusBarColor" tools:targetApi="l">?attr/colorPrimaryVariant</item>
<!-- Customize your theme here. -->
</style>
</resources>


@ -0,0 +1,7 @@
<?xml version ="1.0" encoding ="utf-8"?><!-- Learn More about how to use App Actions: https://developer.android.com/guide/actions/index.html -->
<paths xmlns:android="http://schemas.android.com/apk/res/android">
<root-path name="root" path=""/>
<files-path
name="files"
path="." />
</paths>


@ -0,0 +1,17 @@
package com.yutou.doormanager
import org.junit.Test
import org.junit.Assert.*
/**
* Example local unit test, which will execute on the development machine (host).
*
* See [testing documentation](http://d.android.com/tools/testing).
*/
class ExampleUnitTest {
@Test
fun addition_isCorrect() {
assertEquals(4, 2 + 2)
}
}

build.gradle Normal file

@ -0,0 +1,19 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
google()
mavenCentral()
}
dependencies {
classpath 'com.android.tools.build:gradle:7.1.2'
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:1.6.0"
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
task clean(type: Delete) {
delete rootProject.buildDir
}

gradle.properties Normal file

@ -0,0 +1,22 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true
# Kotlin code style for this project: "official" or "obsolete":
kotlin.code.style=official
android.useDeprecatedNdk=true

libWSLive/.gitignore vendored Normal file

@ -0,0 +1 @@
/build

libWSLive/build.gradle Normal file

@ -0,0 +1,42 @@
apply plugin: 'com.android.library'
android {
compileSdkVersion 31
defaultConfig {
targetSdkVersion 31
ndk{
abiFilters "armeabi-v7a"
}
}
lintOptions {
abortOnError false
checkReleaseBuilds false
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
/* externalNativeBuild{
ndkBuild{
path file("src/main/jni/rtmp/Android.mk")
}
}*/
/* sourceSets {
main {
jniLibs.srcDirs 'src/main/jniLibs'
jni.srcDirs = []
}
}*/
}
dependencies {
implementation 'jp.co.cyberagent.android.gpuimage:gpuimage-library:1.4.1'
}

libWSLive/proguard-rules.pro vendored Normal file

@ -0,0 +1,17 @@
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in /home/lake/Android/Sdk/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the proguardFiles
# directive in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# Add any project specific keep options here:
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}


@ -0,0 +1,17 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="me.lake.librestreaming">
<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.FLASHLIGHT" />
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS"/>
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<application android:label="@string/app_name"
android:largeHeap="true">
</application>
</manifest>


@ -0,0 +1,34 @@
package me.lake.librestreaming.client;
import android.os.Handler;
import android.os.Looper;
import java.util.concurrent.Executor;
public class CallbackDelivery {
static private CallbackDelivery instance;
private final Executor mCallbackPoster;
private final Handler handler = new Handler(Looper.getMainLooper());
// Lazily creates the singleton on first call; note the null check is not synchronized.
public static CallbackDelivery i() {
return instance == null ? instance = new CallbackDelivery() : instance;
}
private CallbackDelivery() {
mCallbackPoster = new Executor() {
@Override
public void execute(Runnable command) {
handler.post(command);
}
};
}
public void post(Runnable runnable) {
mCallbackPoster.execute(runnable);
}
public void postDelayed(Runnable runnable, long time) {
handler.postDelayed(runnable,time);
}
}
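
CallbackDelivery simply marshals Runnables onto the main-thread Handler. From a Kotlin call site (listener and result assumed to be in scope) the usage is just:

    // Deliver a library callback on the UI thread, whatever thread we are on.
    CallbackDelivery.i().post {
        listener.onOpenConnectionResult(result)
    }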


@ -0,0 +1,6 @@
package me.lake.librestreaming.client;
public class Constants {
public static String VERSION = "0.1.0";
}


@ -0,0 +1,134 @@
package me.lake.librestreaming.client;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import me.lake.librestreaming.core.RESSoftAudioCore;
import me.lake.librestreaming.filter.softaudiofilter.BaseSoftAudioFilter;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.tools.LogTools;
public class RESAudioClient {
RESCoreParameters resCoreParameters;
private final Object syncOp = new Object();
private AudioRecordThread audioRecordThread;
private AudioRecord audioRecord;
private byte[] audioBuffer;
private RESSoftAudioCore softAudioCore;
public RESAudioClient(RESCoreParameters parameters) {
resCoreParameters = parameters;
}
public boolean prepare(RESConfig resConfig) {
synchronized (syncOp) {
resCoreParameters.audioBufferQueueNum = 5;
softAudioCore = new RESSoftAudioCore(resCoreParameters);
if (!softAudioCore.prepare(resConfig)) {
LogTools.e("RESAudioClient,prepare");
return false;
}
resCoreParameters.audioRecoderFormat = AudioFormat.ENCODING_PCM_16BIT;
resCoreParameters.audioRecoderChannelConfig = AudioFormat.CHANNEL_IN_MONO;
// One slice = 100 ms of mono samples; the buffer holds the 16-bit (2-byte) PCM for one slice.
resCoreParameters.audioRecoderSliceSize = resCoreParameters.mediacodecAACSampleRate / 10;
resCoreParameters.audioRecoderBufferSize = resCoreParameters.audioRecoderSliceSize * 2;
resCoreParameters.audioRecoderSource = MediaRecorder.AudioSource.DEFAULT;
resCoreParameters.audioRecoderSampleRate = resCoreParameters.mediacodecAACSampleRate;
prepareAudio();
return true;
}
}
public boolean start(RESFlvDataCollecter flvDataCollecter) {
synchronized (syncOp) {
softAudioCore.start(flvDataCollecter);
audioRecord.startRecording();
audioRecordThread = new AudioRecordThread();
audioRecordThread.start();
LogTools.d("RESAudioClient,start()");
return true;
}
}
public boolean stop() {
synchronized (syncOp) {
if(audioRecordThread != null) {
audioRecordThread.quit();
try {
audioRecordThread.join();
} catch (InterruptedException ignored) {
}
softAudioCore.stop();
audioRecordThread = null;
audioRecord.stop();
return true;
}
return true;
}
}
public boolean destroy() {
synchronized (syncOp) {
audioRecord.release();
return true;
}
}
public void setSoftAudioFilter(BaseSoftAudioFilter baseSoftAudioFilter) {
softAudioCore.setAudioFilter(baseSoftAudioFilter);
}
public BaseSoftAudioFilter acquireSoftAudioFilter() {
return softAudioCore.acquireAudioFilter();
}
public void releaseSoftAudioFilter() {
softAudioCore.releaseAudioFilter();
}
private boolean prepareAudio() {
int minBufferSize = AudioRecord.getMinBufferSize(resCoreParameters.audioRecoderSampleRate,
resCoreParameters.audioRecoderChannelConfig,
resCoreParameters.audioRecoderFormat);
audioRecord = new AudioRecord(resCoreParameters.audioRecoderSource,
resCoreParameters.audioRecoderSampleRate,
resCoreParameters.audioRecoderChannelConfig,
resCoreParameters.audioRecoderFormat,
minBufferSize * 5);
audioBuffer = new byte[resCoreParameters.audioRecoderBufferSize];
if (AudioRecord.STATE_INITIALIZED != audioRecord.getState()) {
LogTools.e("audioRecord.getState()!=AudioRecord.STATE_INITIALIZED!");
return false;
}
if (AudioRecord.SUCCESS != audioRecord.setPositionNotificationPeriod(resCoreParameters.audioRecoderSliceSize)) {
LogTools.e("AudioRecord.SUCCESS != audioRecord.setPositionNotificationPeriod(" + resCoreParameters.audioRecoderSliceSize + ")");
return false;
}
return true;
}
class AudioRecordThread extends Thread {
private boolean isRunning = true;
AudioRecordThread() {
isRunning = true;
}
public void quit() {
isRunning = false;
}
@Override
public void run() {
LogTools.d("AudioRecordThread,tid=" + Thread.currentThread().getId());
while (isRunning) {
int size = audioRecord.read(audioBuffer, 0, audioBuffer.length);
if (isRunning && softAudioCore != null && size > 0) {
softAudioCore.queueAudio(audioBuffer);
}
}
}
}
}


@ -0,0 +1,541 @@
package me.lake.librestreaming.client;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.os.Build;
import android.widget.Toast;
import java.lang.ref.WeakReference;
import me.lake.librestreaming.core.listener.RESConnectionListener;
import me.lake.librestreaming.core.listener.RESScreenShotListener;
import me.lake.librestreaming.core.listener.RESVideoChangeListener;
import me.lake.librestreaming.encoder.MediaVideoEncoder;
import me.lake.librestreaming.filter.hardvideofilter.BaseHardVideoFilter;
import me.lake.librestreaming.filter.softaudiofilter.BaseSoftAudioFilter;
import me.lake.librestreaming.filter.softvideofilter.BaseSoftVideoFilter;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.Size;
import me.lake.librestreaming.rtmp.RESFlvData;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.rtmp.RESRtmpSender;
import me.lake.librestreaming.tools.LogTools;
public class RESClient {
public RESVideoClient videoClient;
private RESAudioClient audioClient;
private final Object SyncOp;
//parameters
RESCoreParameters coreParameters;
private RESRtmpSender rtmpSender;
private RESFlvDataCollecter dataCollecter;
// whether the client is currently streaming
public boolean isStreaming = false;
private WeakReference<Activity> mActivity;
public RESClient() {
SyncOp = new Object();
coreParameters = new RESCoreParameters();
CallbackDelivery.i();
}
public void setContext(Context context){
if(context instanceof Activity){
this.mActivity = new WeakReference<Activity>((Activity) context);
}
}
/**
* prepare to stream
*
* @param resConfig config
* @return true if prepare success
*/
public boolean prepare(RESConfig resConfig) {
synchronized (SyncOp) {
checkDirection(resConfig);
coreParameters.filterMode = resConfig.getFilterMode();
coreParameters.rtmpAddr = resConfig.getRtmpAddr();
coreParameters.printDetailMsg = resConfig.isPrintDetailMsg();
coreParameters.senderQueueLength = 200; // previously 150
videoClient = new RESVideoClient(coreParameters);
videoClient.setActivity(mActivity.get());
audioClient = new RESAudioClient(coreParameters);
if (!videoClient.prepare(resConfig)) {
LogTools.d("!!!!!videoClient.prepare()failed");
LogTools.d(coreParameters.toString());
return false;
}
if (!audioClient.prepare(resConfig)) {
LogTools.d("!!!!!audioClient.prepare()failed");
LogTools.d(coreParameters.toString());
return false;
}
rtmpSender = new RESRtmpSender();
rtmpSender.prepare(coreParameters);
dataCollecter = new RESFlvDataCollecter() {
@Override
public void collect(RESFlvData flvData, int type) {
if(rtmpSender != null){
rtmpSender.feed(flvData, type);
}
}
};
coreParameters.done = true;
LogTools.d("===INFO===coreParametersReady:");
LogTools.d(coreParameters.toString());
return true;
}
}
/**
* start streaming
*/
public void startStreaming(String rtmpAddr) {
isStreaming = true;
synchronized (SyncOp) {
try {
videoClient.startStreaming(dataCollecter);
rtmpSender.start(rtmpAddr == null ? coreParameters.rtmpAddr : rtmpAddr);
audioClient.start(dataCollecter);
LogTools.d("RESClient,startStreaming()");
}catch (Exception e){
if(mActivity.get() !=null){
Toast.makeText(mActivity.get(), "Permissions may be missing", Toast.LENGTH_LONG).show();
mActivity.get().finish();
}
}
}
}
/**
* start streaming
*/
public void startStreaming() {
isStreaming = true;
synchronized (SyncOp) {
videoClient.startStreaming(dataCollecter);
rtmpSender.start(coreParameters.rtmpAddr);
audioClient.start(dataCollecter);
LogTools.d("RESClient,startStreaming()");
}
}
/**
* stop streaming
*/
public void stopStreaming() {
isStreaming = false;
synchronized (SyncOp) {
videoClient.stopStreaming();
audioClient.stop();
rtmpSender.stop();
LogTools.d("RESClient,stopStreaming()");
}
}
/**
* clean up
*/
public void destroy() {
synchronized (SyncOp) {
rtmpSender.destroy();
videoClient.destroy();
audioClient.destroy();
rtmpSender = null;
videoClient = null;
audioClient = null;
LogTools.d("RESClient,destroy()");
}
}
/**
* call it AFTER {@link #prepare(RESConfig)}
*
* @param surfaceTexture to rendering preview
*/
public void startPreview(SurfaceTexture surfaceTexture, int visualWidth, int visualHeight) {
if(videoClient != null){
videoClient.startPreview(surfaceTexture, visualWidth, visualHeight);
}
LogTools.d("RESClient,startPreview()");
}
public void updatePreview(int visualWidth, int visualHeight) {
if(videoClient != null){
videoClient.updatePreview(visualWidth, visualHeight);
}
LogTools.d("RESClient,updatePreview()");
}
public Camera getCamera(){
return videoClient.getCamera();
}
public int getCameraId(){
return videoClient.currentCameraIndex;
}
/**
*
* @param releaseTexture true if you won't reuse this surfaceTexture later
*/
public void stopPreview(boolean releaseTexture) {
if(videoClient != null){
videoClient.stopPreview(releaseTexture);
}
LogTools.d("RESClient,stopPreview()");
}
/**
* switch camera while running.<br/>
*/
public boolean swapCamera() {
synchronized (SyncOp) {
LogTools.d("RESClient,swapCamera()");
return videoClient.swapCamera();
}
}
/**
* only for soft filter mode.<br/>
* use it to update filter property.<br/>
* call it with {@link #releaseSoftVideoFilter()}<br/>
* make sure to release it within 3 ms
*
* @return the videofilter in use
*/
public BaseSoftVideoFilter acquireSoftVideoFilter() {
return videoClient.acquireSoftVideoFilter();
}
/**
* only for soft filter mode.<br/>
* call it with {@link #acquireSoftVideoFilter()}
*/
public void releaseSoftVideoFilter() {
videoClient.releaseSoftVideoFilter();
}
/**
* get the real video size,call after prepare()
*
* @return
*/
public Size getVideoSize() {
return new Size(coreParameters.videoWidth, coreParameters.videoHeight);
}
/**
* get the rtmp server ip addr ,call after connect success.
*
* @return
*/
public String getServerIpAddr() {
synchronized (SyncOp) {
return rtmpSender == null ? null : rtmpSender.getServerIpAddr();
}
}
/**
* get the real draw frame rate of screen
*
* @return
*/
public float getDrawFrameRate() {
synchronized (SyncOp) {
return videoClient == null ? 0 : videoClient.getDrawFrameRate();
}
}
/**
* get the rate of video frame sent by rtmp
*
* @return
*/
public float getSendFrameRate() {
synchronized (SyncOp) {
return rtmpSender == null ? 0 : rtmpSender.getSendFrameRate();
}
}
/**
* get free percent of send buffer
* returns ~0.0 if the network speed is insufficient or the connection is blocked.
* @return
*/
public float getSendBufferFreePercent() {
synchronized (SyncOp) {
return rtmpSender == null ? 0 : rtmpSender.getSendBufferFreePercent();
}
}
/**
* only for soft filter mode.<br/>
* set videofilter.<br/>
* can be called Repeatedly.<br/>
* do NOT call it between {@link #acquireSoftVideoFilter()} & {@link #releaseSoftVideoFilter()}
*
* @param baseSoftVideoFilter videofilter to apply
*/
public void setSoftVideoFilter(BaseSoftVideoFilter baseSoftVideoFilter) {
videoClient.setSoftVideoFilter(baseSoftVideoFilter);
}
/**
* only for hard filter mode.<br/>
* use it to update filter property.<br/>
* call it with {@link #releaseHardVideoFilter()}<br/>
* make sure to release it within 3 ms
*
* @return the videofilter in use
*/
public BaseHardVideoFilter acquireHardVideoFilter() {
return videoClient.acquireHardVideoFilter();
}
/**
* only for hard filter mode.<br/>
* call it with {@link #acquireHardVideoFilter()}
*/
public void releaseHardVideoFilter() {
videoClient.releaseHardVideoFilter();
}
/**
* only for hard filter mode.<br/>
* set videofilter.<br/>
* can be called Repeatedly.<br/>
* do NOT call it between {@link #acquireHardVideoFilter()} & {@link #releaseHardVideoFilter()}
*
* @param baseHardVideoFilter videofilter to apply
*/
public void setHardVideoFilter(BaseHardVideoFilter baseHardVideoFilter) {
videoClient.setHardVideoFilter(baseHardVideoFilter);
}
/**
* set audiofilter.<br/>
* can be called Repeatedly.<br/>
* do NOT call it between {@link #acquireSoftAudioFilter()} & {@link #releaseSoftAudioFilter()}
*
* @param baseSoftAudioFilter audiofilter to apply
*/
public void setSoftAudioFilter(BaseSoftAudioFilter baseSoftAudioFilter) {
audioClient.setSoftAudioFilter(baseSoftAudioFilter);
}
/**
* use it to update filter property.<br/>
* call it with {@link #releaseSoftAudioFilter()}<br/>
* make sure to release it within 3 ms
*
* @return the audiofilter in use
*/
public BaseSoftAudioFilter acquireSoftAudioFilter() {
return audioClient.acquireSoftAudioFilter();
}
/**
* call it with {@link #acquireSoftAudioFilter()}
*/
public void releaseSoftAudioFilter() {
audioClient.releaseSoftAudioFilter();
}
/**
* get video & audio real send Speed
*
* @return speed in B/s
*/
public int getAVSpeed() {
synchronized (SyncOp) {
return rtmpSender == null ? 0 : rtmpSender.getTotalSpeed();
}
}
/**
* call it AFTER {@link #prepare(RESConfig)}
*
* @param connectionListener
*/
public void setConnectionListener(RESConnectionListener connectionListener) {
if(rtmpSender != null) {
rtmpSender.setConnectionListener(connectionListener);
}
}
/**
* listener for video size change
* @param videoChangeListener
*/
public void setVideoChangeListener(RESVideoChangeListener videoChangeListener) {
if(videoClient != null){
videoClient.setVideoChangeListener(videoChangeListener);
}
}
/**
* get the param of video,audio,mediacodec
*
* @return info
*/
public String getConfigInfo() {
return coreParameters.toString();
}
/**
* set zoom by percent [0.0f,1.0f]
*
* @param targetPercent zoompercent
*/
public boolean setZoomByPercent(float targetPercent) {
return videoClient.setZoomByPercent(targetPercent);
}
/**
* toggle flash light
*
* @return true if operation success
*/
public boolean toggleFlashLight() {
return videoClient.toggleFlashLight();
}
public void takeScreenShot(RESScreenShotListener listener) {
videoClient.takeScreenShot(listener);
}
/**
* Change video bitrate on the fly<br/>
* call it between {@link #startStreaming()} & {@link #stopStreaming()}
* @param bitrate target bitrate in bits/sec
*/
@TargetApi(Build.VERSION_CODES.KITKAT)
public void reSetVideoBitrate(int bitrate) {
videoClient.reSetVideoBitrate(bitrate);
}
/**
* get current bitrate
* @return current bitrate in bits/sec
*/
public int getVideoBitrate() {
return videoClient.getVideoBitrate();
}
/**
* update fps on the fly.
* @param fps target frame rate; capped at the camera's maximum preview fps
*/
public void reSetVideoFPS(int fps) {
videoClient.reSetVideoFPS(fps);
}
/**
* only works in hard filter mode.
* reset the video size on the fly.
* may restart the camera and will restart mediacodec,
* but will not interrupt streaming.
* @param targetVideoSize the new video size; ignored if null
*/
public void reSetVideoSize(Size targetVideoSize) {
if (targetVideoSize == null) {
return;
}
if (coreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
throw new IllegalArgumentException("soft mode doesn't support reSetVideoSize");
}else {
videoClient.reSetVideoSize(targetVideoSize);
}
}
public RESRtmpSender getRtmpSender(){
return rtmpSender;
}
public String getVertion() {
return Constants.VERSION;
}
/**
* =====================PRIVATE=================
**/
private void checkDirection(RESConfig resConfig) {
int frontFlag = resConfig.getFrontCameraDirectionMode();
int backFlag = resConfig.getBackCameraDirectionMode();
int fbit = 0;
int bbit = 0;
if ((frontFlag >> 4) == 0) {
frontFlag |= RESCoreParameters.FLAG_DIRECTION_ROATATION_0;
}
if ((backFlag >> 4) == 0) {
backFlag |= RESCoreParameters.FLAG_DIRECTION_ROATATION_0;
}
for (int i = 4; i <= 8; ++i) {
if (((frontFlag >> i) & 0x1) == 1) {
fbit++;
}
if (((backFlag >> i) & 0x1) == 1) {
bbit++;
}
}
if (fbit != 1 || bbit != 1) {
throw new RuntimeException("invalid direction rotation flag:frontFlagNum=" + fbit + ",backFlagNum=" + bbit);
}
if (((frontFlag & RESCoreParameters.FLAG_DIRECTION_ROATATION_0) != 0) || ((frontFlag & RESCoreParameters.FLAG_DIRECTION_ROATATION_180) != 0)) {
fbit = 0;
} else {
fbit = 1;
}
if (((backFlag & RESCoreParameters.FLAG_DIRECTION_ROATATION_0) != 0) || ((backFlag & RESCoreParameters.FLAG_DIRECTION_ROATATION_180) != 0)) {
bbit = 0;
} else {
bbit = 1;
}
if (bbit != fbit) {
if (bbit == 0) {
throw new RuntimeException("invalid direction rotation flag:back camera is landscape but front camera is portrait");
} else {
throw new RuntimeException("invalid direction rotation flag:back camera is portrait but front camera is landscape");
}
}
coreParameters.isPortrait = (fbit == 1);
coreParameters.backCameraDirectionMode = backFlag;
coreParameters.frontCameraDirectionMode = frontFlag;
System.out.println("coreParameters.backCameraDirectionMode = " + coreParameters.backCameraDirectionMode);
}
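/*
 * Example flag combinations accepted by checkDirection() above (constants from
 * RESCoreParameters; exactly one rotation bit per camera, and both cameras must
 * agree on portrait vs landscape; the RESConfig setter names are assumed from
 * the getters used above):
 *
 *     resConfig.setFrontCameraDirectionMode(RESCoreParameters.FLAG_DIRECTION_ROATATION_90);
 *     resConfig.setBackCameraDirectionMode(RESCoreParameters.FLAG_DIRECTION_ROATATION_270);
 *     // rotation 90/270 -> isPortrait = true; rotation 0/180 -> isPortrait = false
 */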
public void setVideoEncoder(final MediaVideoEncoder encoder) {
videoClient.setVideoEncoder(encoder);
}
public void setMirror(boolean isEnableMirror,boolean isEnablePreviewMirror,boolean isEnableStreamMirror) {
videoClient.setMirror(isEnableMirror,isEnablePreviewMirror,isEnableStreamMirror);
}
public void setNeedResetEglContext(boolean bol){
videoClient.setNeedResetEglContext(bol);
}
public void setCreamAr(){
videoClient.setCameraArea();
}
static {
System.loadLibrary("restreaming");
}
}

View File

@ -0,0 +1,525 @@
package me.lake.librestreaming.client;
import android.app.Activity;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import me.lake.librestreaming.core.CameraHelper;
import me.lake.librestreaming.core.RESHardVideoCore;
import me.lake.librestreaming.core.RESSoftVideoCore;
import me.lake.librestreaming.core.RESVideoCore;
import me.lake.librestreaming.core.listener.RESScreenShotListener;
import me.lake.librestreaming.core.listener.RESVideoChangeListener;
import me.lake.librestreaming.encoder.MediaVideoEncoder;
import me.lake.librestreaming.filter.hardvideofilter.BaseHardVideoFilter;
import me.lake.librestreaming.filter.softvideofilter.BaseSoftVideoFilter;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.Size;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.tools.BuffSizeCalculator;
import me.lake.librestreaming.tools.CameraUtil;
import me.lake.librestreaming.tools.LogTools;
public class RESVideoClient {
RESCoreParameters resCoreParameters;
private final Object syncOp = new Object();
private Camera camera;
public SurfaceTexture camTexture;
private int cameraNum;
public int currentCameraIndex;
private RESVideoCore videoCore;
private boolean isStreaming;
private boolean isPreviewing;
public RESVideoClient(RESCoreParameters parameters) {
resCoreParameters = parameters;
cameraNum = Camera.getNumberOfCameras();
currentCameraIndex = Camera.CameraInfo.CAMERA_FACING_BACK;
isStreaming = false;
isPreviewing = false;
}
public boolean prepare(RESConfig resConfig) {
synchronized (syncOp) {
if ((cameraNum - 1) >= resConfig.getDefaultCamera()) {
currentCameraIndex = resConfig.getDefaultCamera();
}
if (null == (camera = createCamera(currentCameraIndex))) {
LogTools.e("can not open camera");
return false;
}
Camera.Parameters parameters = camera.getParameters();
CameraHelper.selectCameraPreviewWH(parameters, resCoreParameters, resConfig.getTargetPreviewSize());
CameraHelper.selectCameraFpsRange(parameters, resCoreParameters);
if (resConfig.getVideoFPS() > resCoreParameters.previewMaxFps / 1000) {
resCoreParameters.videoFPS = resCoreParameters.previewMaxFps / 1000;
} else {
resCoreParameters.videoFPS = resConfig.getVideoFPS();
}
resolveResolution(resCoreParameters, resConfig.getTargetVideoSize());
if (!CameraHelper.selectCameraColorFormat(parameters, resCoreParameters)) {
LogTools.e("CameraHelper.selectCameraColorFormat,Failed");
resCoreParameters.dump();
return false;
}
if (!CameraHelper.configCamera(camera, resCoreParameters)) {
LogTools.e("CameraHelper.configCamera,Failed");
resCoreParameters.dump();
return false;
}
switch (resCoreParameters.filterMode) {
case RESCoreParameters.FILTER_MODE_SOFT:
videoCore = new RESSoftVideoCore(resCoreParameters);
break;
case RESCoreParameters.FILTER_MODE_HARD:
videoCore = new RESHardVideoCore(resCoreParameters);
break;
}
if (!videoCore.prepare(resConfig)) {
return false;
}
videoCore.setCurrentCamera(currentCameraIndex);
prepareVideo();
return true;
}
}
public Camera getCamera(){
return camera;
}
private Camera createCamera(int cameraId) {
try {
camera = Camera.open(cameraId);
CameraUtil.setCameraDisplayOrientation(activity,cameraId,camera);
} catch (SecurityException e) {
LogTools.trace("no permission", e);
return null;
} catch (Exception e) {
LogTools.trace("camera.open()failed", e);
return null;
}
return camera;
}
private boolean prepareVideo() {
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
camera.addCallbackBuffer(new byte[resCoreParameters.previewBufferSize]);
camera.addCallbackBuffer(new byte[resCoreParameters.previewBufferSize]);
}
return true;
}
private boolean startVideo() {
camTexture = new SurfaceTexture(RESVideoCore.OVERWATCH_TEXTURE_ID);
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized (syncOp) {
if (videoCore != null && data != null) {
((RESSoftVideoCore) videoCore).queueVideo(data);
}
camera.addCallbackBuffer(data);
}
}
});
} else {
camTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
synchronized (syncOp) {
if (videoCore != null) {
((RESHardVideoCore) videoCore).onFrameAvailable();
}
}
}
});
}
try {
camera.setPreviewTexture(camTexture);
} catch (IOException e) {
LogTools.trace(e);
camera.release();
return false;
}
camera.startPreview();
return true;
}
public boolean startPreview(SurfaceTexture surfaceTexture, int visualWidth, int visualHeight) {
synchronized (syncOp) {
if (!isStreaming && !isPreviewing) {
if (!startVideo()) {
resCoreParameters.dump();
LogTools.e("RESVideoClient,start(),failed");
return false;
}
videoCore.updateCamTexture(camTexture);
}
videoCore.startPreview(surfaceTexture, visualWidth, visualHeight);
isPreviewing = true;
return true;
}
}
public void updatePreview(int visualWidth, int visualHeight) {
videoCore.updatePreview(visualWidth, visualHeight);
}
public boolean stopPreview(boolean releaseTexture) {
synchronized (syncOp) {
if (isPreviewing) {
videoCore.stopPreview(releaseTexture);
if (!isStreaming) {
camera.stopPreview();
videoCore.updateCamTexture(null);
camTexture.release();
}
}
isPreviewing = false;
return true;
}
}
public boolean startStreaming(RESFlvDataCollecter flvDataCollecter) {
synchronized (syncOp) {
if (!isStreaming && !isPreviewing) {
if (!startVideo()) {
resCoreParameters.dump();
LogTools.e("RESVideoClient,start(),failed");
return false;
}
videoCore.updateCamTexture(camTexture);
}
videoCore.startStreaming(flvDataCollecter);
isStreaming = true;
return true;
}
}
public boolean stopStreaming() {
synchronized (syncOp) {
if (isStreaming) {
videoCore.stopStreaming();
if (!isPreviewing) {
camera.stopPreview();
videoCore.updateCamTexture(null);
camTexture.release();
}
}
isStreaming = false;
return true;
}
}
public boolean destroy() {
synchronized (syncOp) {
camera.release();
videoCore.destroy();
videoCore = null;
camera = null;
return true;
}
}
public boolean swapCamera() {
synchronized (syncOp) {
LogTools.d("RESClient,swapCamera()");
camera.stopPreview();
camera.release();
camera = null;
currentCameraIndex = (currentCameraIndex + 1) % cameraNum;
if (null == (camera = createCamera(currentCameraIndex))) {
LogTools.e("can not swap camera");
return false;
}
videoCore.setCurrentCamera(currentCameraIndex);
CameraHelper.selectCameraFpsRange(camera.getParameters(), resCoreParameters);
if (!CameraHelper.configCamera(camera, resCoreParameters)) {
camera.release();
return false;
}
prepareVideo();
camTexture.release();
videoCore.updateCamTexture(null);
startVideo();
videoCore.updateCamTexture(camTexture);
return true;
}
}
public boolean toggleFlashLight() {
synchronized (syncOp) {
try {
Camera.Parameters parameters = camera.getParameters();
List<String> flashModes = parameters.getSupportedFlashModes();
String flashMode = parameters.getFlashMode();
if (!Camera.Parameters.FLASH_MODE_TORCH.equals(flashMode)) {
if (flashModes.contains(Camera.Parameters.FLASH_MODE_TORCH)) {
parameters.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
camera.setParameters(parameters);
return true;
}
} else if (!Camera.Parameters.FLASH_MODE_OFF.equals(flashMode)) {
if (flashModes.contains(Camera.Parameters.FLASH_MODE_OFF)) {
parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
camera.setParameters(parameters);
return true;
}
}
} catch (Exception e) {
LogTools.d("toggleFlashLight,failed" + e.getMessage());
return false;
}
return false;
}
}
public boolean setZoomByPercent(float targetPercent) {
synchronized (syncOp) {
targetPercent = Math.min(Math.max(0f, targetPercent), 1f);
Camera.Parameters p = camera.getParameters();
p.setZoom((int) (p.getMaxZoom() * targetPercent));
camera.setParameters(p);
return true;
}
}
public void reSetVideoBitrate(int bitrate) {
synchronized (syncOp) {
if (videoCore != null) {
videoCore.reSetVideoBitrate(bitrate);
}
}
}
public int getVideoBitrate() {
synchronized (syncOp) {
if (videoCore != null) {
return videoCore.getVideoBitrate();
} else {
return 0;
}
}
}
public void reSetVideoFPS(int fps) {
synchronized (syncOp) {
int targetFps;
if (fps > resCoreParameters.previewMaxFps / 1000) {
targetFps = resCoreParameters.previewMaxFps / 1000;
} else {
targetFps = fps;
}
if (videoCore != null) {
videoCore.reSetVideoFPS(targetFps);
}
}
}
public boolean reSetVideoSize(Size targetVideoSize) {
synchronized (syncOp) {
RESCoreParameters newParameters = new RESCoreParameters();
newParameters.isPortrait = resCoreParameters.isPortrait;
newParameters.filterMode = resCoreParameters.filterMode;
Camera.Parameters parameters = camera.getParameters();
CameraHelper.selectCameraPreviewWH(parameters, newParameters, targetVideoSize);
resolveResolution(newParameters, targetVideoSize);
boolean needRestartCamera = (newParameters.previewVideoHeight != resCoreParameters.previewVideoHeight
|| newParameters.previewVideoWidth != resCoreParameters.previewVideoWidth);
if (needRestartCamera) {
//buffer size must match the NEW preview dimensions, not the old ones
newParameters.previewBufferSize = BuffSizeCalculator.calculator(newParameters.previewVideoWidth,
newParameters.previewVideoHeight, resCoreParameters.previewColorFormat);
resCoreParameters.previewVideoWidth = newParameters.previewVideoWidth;
resCoreParameters.previewVideoHeight = newParameters.previewVideoHeight;
resCoreParameters.previewBufferSize = newParameters.previewBufferSize;
if ((isPreviewing || isStreaming)) {
LogTools.d("RESClient,reSetVideoSize.restartCamera");
camera.stopPreview();
camera.release();
camera = null;
if (null == (camera = createCamera(currentCameraIndex))) {
LogTools.e("can not createCamera camera");
return false;
}
if (!CameraHelper.configCamera(camera, resCoreParameters)) {
camera.release();
return false;
}
prepareVideo();
videoCore.updateCamTexture(null);
camTexture.release();
startVideo();
videoCore.updateCamTexture(camTexture);
}
}
videoCore.reSetVideoSize(newParameters);
return true;
}
}
public BaseSoftVideoFilter acquireSoftVideoFilter() {
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
return ((RESSoftVideoCore) videoCore).acquireVideoFilter();
}
return null;
}
public void releaseSoftVideoFilter() {
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
((RESSoftVideoCore) videoCore).releaseVideoFilter();
}
}
public void setSoftVideoFilter(BaseSoftVideoFilter baseSoftVideoFilter) {
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
((RESSoftVideoCore) videoCore).setVideoFilter(baseSoftVideoFilter);
}
}
public BaseHardVideoFilter acquireHardVideoFilter() {
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_HARD) {
return ((RESHardVideoCore) videoCore).acquireVideoFilter();
}
return null;
}
public void releaseHardVideoFilter() {
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_HARD) {
((RESHardVideoCore) videoCore).releaseVideoFilter();
}
}
public void setHardVideoFilter(BaseHardVideoFilter baseHardVideoFilter) {
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_HARD) {
LogTools.d("setHardVideoFilter,videoCore=" + videoCore);
((RESHardVideoCore) videoCore).setVideoFilter(baseHardVideoFilter);
}
}
public void takeScreenShot(RESScreenShotListener listener) {
synchronized (syncOp) {
if (videoCore != null) {
videoCore.takeScreenShot(listener);
}
}
}
public void setVideoChangeListener(RESVideoChangeListener listener) {
synchronized (syncOp) {
if (videoCore != null) {
videoCore.setVideoChangeListener(listener);
}
}
}
public float getDrawFrameRate() {
synchronized (syncOp) {
return videoCore == null ? 0 : videoCore.getDrawFrameRate();
}
}
private void resolveResolution(RESCoreParameters resCoreParameters, Size targetVideoSize) {
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
if (resCoreParameters.isPortrait) {
resCoreParameters.videoHeight = resCoreParameters.previewVideoWidth;
resCoreParameters.videoWidth = resCoreParameters.previewVideoHeight;
} else {
resCoreParameters.videoWidth = resCoreParameters.previewVideoWidth;
resCoreParameters.videoHeight = resCoreParameters.previewVideoHeight;
}
} else {
float pw, ph, vw, vh;
if (resCoreParameters.isPortrait) {
resCoreParameters.videoHeight = targetVideoSize.getWidth();
resCoreParameters.videoWidth = targetVideoSize.getHeight();
pw = resCoreParameters.previewVideoHeight;
ph = resCoreParameters.previewVideoWidth;
} else {
resCoreParameters.videoWidth = targetVideoSize.getWidth();
resCoreParameters.videoHeight = targetVideoSize.getHeight();
pw = resCoreParameters.previewVideoWidth;
ph = resCoreParameters.previewVideoHeight;
}
vw = resCoreParameters.videoWidth;
vh = resCoreParameters.videoHeight;
float pr = ph / pw, vr = vh / vw;
if (pr == vr) {
resCoreParameters.cropRatio = 0.0f;
} else if (pr > vr) {
resCoreParameters.cropRatio = (1.0f - vr / pr) / 2.0f;
} else {
resCoreParameters.cropRatio = -(1.0f - pr / vr) / 2.0f;
}
}
}
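/*
 * Worked example of the cropRatio math above (hard mode, landscape): a 640x480
 * preview (ph/pw = 0.75) feeding a 1280x720 target (vh/vw = 0.5625) gives
 * cropRatio = (1 - 0.5625 / 0.75) / 2 = 0.125, i.e. 12.5% is cropped from each
 * edge of the taller axis; GLHelper.getCamera2DTextureVerticesBuffer() then
 * shrinks the texture coordinates on that axis from [0, 1] to [0.125, 0.875].
 */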
public void setVideoEncoder(final MediaVideoEncoder encoder) {
videoCore.setVideoEncoder(encoder);
}
public void setMirror(boolean isEnableMirror,boolean isEnablePreviewMirror,boolean isEnableStreamMirror) {
videoCore.setMirror(isEnableMirror,isEnablePreviewMirror,isEnableStreamMirror);
}
public void setNeedResetEglContext(boolean bol){
videoCore.setNeedResetEglContext(bol);
}
public void setActivity(Activity activity) {
this.activity=activity;
}
private Activity activity;
/**
* Convert a tap position into a camera focus area.
* The camera focus coordinate range is (-1000, -1000, 1000, 1000).
* x,y is the tap position, width/height are the SurfaceView dimensions,
* and coefficient scales the focus area size.
*/
private static Rect calculateTapArea(float x, float y, int width, int height, float coefficient) {
float focusAreaSize = 200;
//as this code shows, coefficient only serves to scale areaSize
int areaSize = (int) (focusAreaSize * coefficient);
int surfaceWidth = width;
int surfaceHeight = height;
//Why multiply by 2000: the tap position must be mapped from SurfaceView coordinates onto the (-1000, -1000, 1000, 1000) camera range, whose center is (0,0). Dividing a coordinate by the surface extent gives its fraction of the whole, multiplying by 2000 maps it into a (0,0)-(2000,2000) range, and subtracting 1000 shifts it into (-1000,-1000)-(1000,1000).
//With the converted center and areaSize we can build the focus rectangle.
int centerX = (int) (x / surfaceHeight * 2000 - 1000);
int centerY = (int) (y / surfaceWidth * 2000 - 1000);
int left = clamp(centerX - (areaSize / 2), -1000, 1000);
int top = clamp(centerY - (areaSize / 2), -1000, 1000);
int right = clamp(left + areaSize, -1000, 1000);
int bottom = clamp(top + areaSize, -1000, 1000);
return new Rect(left, top, right, bottom);
}
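/*
 * Worked example: a tap at the center of a 1000x1000 surface maps to
 * centerX = centerY = (500 / 1000) * 2000 - 1000 = 0, so with coefficient = 1.0
 * (areaSize = 200) the resulting focus rect is (-100, -100, 100, 100).
 */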
//clamp x into [min, max]
private static int clamp(int x, int min, int max) {
if (x > max) {
return max;
}
if (x < min) {
return min;
}
return x;
}
public void setCameraArea(){
System.out.println("设置对焦");
List<Camera.Area> focusAreas = new ArrayList<>();
focusAreas.add(new Camera.Area(calculateTapArea(0,0,5,5,1.0f), 800));
Camera.Parameters parameters=camera.getParameters();
parameters.setFocusAreas(focusAreas);
camera.setParameters(parameters);
camera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean b, Camera camera) {
System.out.println("对焦完成");
}
});
}
}

View File

@ -0,0 +1,109 @@
package me.lake.librestreaming.core;
import android.media.MediaCodec;
import java.nio.ByteBuffer;
import me.lake.librestreaming.rtmp.RESFlvData;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.rtmp.RESRtmpSender;
import me.lake.librestreaming.tools.LogTools;
public class AudioSenderThread extends Thread {
private static final long WAIT_TIME = 5000;//5ms; dequeueOutputBuffer timeout is in microseconds
private MediaCodec.BufferInfo eInfo;
private long startTime = 0;
private MediaCodec dstAudioEncoder;
private RESFlvDataCollecter dataCollecter;
AudioSenderThread(String name, MediaCodec encoder, RESFlvDataCollecter flvDataCollecter) {
super(name);
eInfo = new MediaCodec.BufferInfo();
startTime = 0;
dstAudioEncoder = encoder;
dataCollecter = flvDataCollecter;
}
private boolean shouldQuit = false;
void quit() {
shouldQuit = true;
this.interrupt();
}
@Override
public void run() {
while (!shouldQuit) {
int eobIndex = dstAudioEncoder.dequeueOutputBuffer(eInfo, WAIT_TIME);
switch (eobIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
LogTools.d("AudioSenderThread,MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED");
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
// LogTools.d("AudioSenderThread,MediaCodec.INFO_TRY_AGAIN_LATER");
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
LogTools.d("AudioSenderThread,MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:" +
dstAudioEncoder.getOutputFormat().toString());
ByteBuffer csd0 = dstAudioEncoder.getOutputFormat().getByteBuffer("csd-0");
sendAudioSpecificConfig(0, csd0);
break;
default:
LogTools.d("AudioSenderThread,MediaCode,eobIndex=" + eobIndex);
if (startTime == 0) {
startTime = eInfo.presentationTimeUs / 1000;
}
/**
 * the AudioSpecificConfig was already sent in INFO_OUTPUT_FORMAT_CHANGED,
 * so codec-config buffers are skipped here (tested bitwise, since flags
 * may combine several bits)
 */
if ((eInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0 && eInfo.size != 0) {
ByteBuffer realData = dstAudioEncoder.getOutputBuffers()[eobIndex];
realData.position(eInfo.offset);
realData.limit(eInfo.offset + eInfo.size);
sendRealData((eInfo.presentationTimeUs / 1000) - startTime, realData);
}
dstAudioEncoder.releaseOutputBuffer(eobIndex, false);
break;
}
}
eInfo = null;
}
private void sendAudioSpecificConfig(long tms, ByteBuffer realData) {
int packetLen = Packager.FLVPackager.FLV_AUDIO_TAG_LENGTH +
realData.remaining();
byte[] finalBuff = new byte[packetLen];
realData.get(finalBuff, Packager.FLVPackager.FLV_AUDIO_TAG_LENGTH,
realData.remaining());
Packager.FLVPackager.fillFlvAudioTag(finalBuff,
0,
true);
RESFlvData resFlvData = new RESFlvData();
resFlvData.droppable = false;
resFlvData.byteBuffer = finalBuff;
resFlvData.size = finalBuff.length;
resFlvData.dts = (int) tms;
resFlvData.flvTagType = RESFlvData.FLV_RTMP_PACKET_TYPE_AUDIO;
dataCollecter.collect(resFlvData, RESRtmpSender.FROM_AUDIO);
}
private void sendRealData(long tms, ByteBuffer realData) {
int packetLen = Packager.FLVPackager.FLV_AUDIO_TAG_LENGTH +
realData.remaining();
byte[] finalBuff = new byte[packetLen];
realData.get(finalBuff, Packager.FLVPackager.FLV_AUDIO_TAG_LENGTH,
realData.remaining());
Packager.FLVPackager.fillFlvAudioTag(finalBuff,
0,
false);
RESFlvData resFlvData = new RESFlvData();
resFlvData.droppable = true;
resFlvData.byteBuffer = finalBuff;
resFlvData.size = finalBuff.length;
resFlvData.dts = (int) tms;
resFlvData.flvTagType = RESFlvData.FLV_RTMP_PACKET_TYPE_AUDIO;
dataCollecter.collect(resFlvData, RESRtmpSender.FROM_AUDIO);
}
}

View File

@ -0,0 +1,131 @@
package me.lake.librestreaming.core;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.hardware.Camera;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.Size;
import me.lake.librestreaming.tools.LogTools;
public class CameraHelper {
public static int targetFps = 30000;
private static int[] supportedSrcVideoFrameColorType = new int[]{ImageFormat.NV21, ImageFormat.YV12};
public static boolean configCamera(Camera camera, RESCoreParameters coreParameters) {
camera.cancelAutoFocus();
Camera.Parameters parameters = camera.getParameters();
parameters.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
List<String> focusModes = parameters.getSupportedFocusModes();
if (focusModes != null) {
for (String focusMode : focusModes) {
System.out.println("focusMode = " + focusMode);
}
/*if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
} else if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
} else if (focusModes.contains(Camera.Parameters.FOCUS_MODE_FIXED)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
}*/
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
}
parameters.setPreviewSize(coreParameters.previewVideoWidth, coreParameters.previewVideoHeight);
parameters.setPreviewFpsRange(coreParameters.previewMinFps, coreParameters.previewMaxFps);
try {
camera.setParameters(parameters);
} catch (Exception e) {
camera.release();
return false;
}
return true;
}
public static void selectCameraFpsRange(Camera.Parameters parameters, RESCoreParameters coreParameters) {
List<int[]> fpsRanges = parameters.getSupportedPreviewFpsRange();
/* Collections.sort(fpsRanges, new Comparator<int[]>() {
@Override
public int compare(int[] lhs, int[] rhs) {
int r = Math.abs(lhs[0] - targetFps) + Math.abs(lhs[1] - targetFps);
int l = Math.abs(rhs[0] - targetFps) + Math.abs(rhs[1] - targetFps);
if (r > l) {
return 1;
} else if (r < l) {
return -1;
} else {
return 0;
}
}
});*/
coreParameters.previewMinFps = fpsRanges.get(0)[0];
for (int[] fpsRange : fpsRanges) {
if (coreParameters.previewMinFps > fpsRange[0]) {
coreParameters.previewMinFps = fpsRange[0];
}
if (coreParameters.previewMaxFps < fpsRange[1]) {
coreParameters.previewMaxFps = fpsRange[1];
}
}
for (int[] range : fpsRanges) {
System.out.println("最小fps = " + range[0]);
System.out.println("最大fps = " + range[1]);
System.out.println("--------------");
}
System.out.println("最小fps *= " + coreParameters.previewMinFps);
System.out.println("最大fps *= " + coreParameters.previewMaxFps);
System.out.println("fpsRanges.size() = " + fpsRanges.size());
}
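/*
 * Worked example: for supported ranges {[15000, 15000], [24000, 24000],
 * [15000, 30000]} the loop above widens the pair to previewMinFps = 15000 and
 * previewMaxFps = 30000 (values are fps * 1000, per the Camera API).
 */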
public static void selectCameraPreviewWH(Camera.Parameters parameters, RESCoreParameters coreParameters, Size targetSize) {
List<Camera.Size> previewsSizes = parameters.getSupportedPreviewSizes();
Collections.sort(previewsSizes, new Comparator<Camera.Size>() {
@Override
public int compare(Camera.Size lhs, Camera.Size rhs) {
if ((lhs.width * lhs.height) > (rhs.width * rhs.height)) {
return 1;
} else {
return -1;
}
}
});
for (Camera.Size size : previewsSizes) {
if (size.width >= targetSize.getWidth() && size.height >= targetSize.getHeight()) {
coreParameters.previewVideoWidth = size.width;
coreParameters.previewVideoHeight = size.height;
return;
}
}
}
public static boolean selectCameraColorFormat(Camera.Parameters parameters, RESCoreParameters coreParameters) {
List<Integer> srcColorTypes = new LinkedList<>();
List<Integer> supportedPreviewFormats = parameters.getSupportedPreviewFormats();
for (int colortype : supportedSrcVideoFrameColorType) {
if (supportedPreviewFormats.contains(colortype)) {
srcColorTypes.add(colortype);
}
}
//select preview colorformat
if (srcColorTypes.contains(ImageFormat.NV21)) {
coreParameters.previewColorFormat = ImageFormat.NV21;
} else if (srcColorTypes.contains(ImageFormat.YV12)) {
coreParameters.previewColorFormat = ImageFormat.YV12;
} else {
LogTools.e("unsupported preview color format");
return false;
}
return true;
}
}

View File

@ -0,0 +1,19 @@
package me.lake.librestreaming.core;
@SuppressWarnings("all")
public class ColorHelper {
static public native void NV21TOYUV420SP(byte[] src, byte[] dst, int YSize);
static public native void NV21TOYUV420P(byte[] src, byte[] dst, int YSize);
static public native void YUV420SPTOYUV420P(byte[] src, byte[] dst, int YSize);
static public native void NV21TOARGB(byte[] src, int[] dst, int width,int height);
static public native void FIXGLPIXEL(int[] src,int[] dst, int width,int height);
//slow
static public native void NV21Transform(byte[] src, byte[] dst, int srcwidth,int srcheight,int directionFlag);
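/*
 * Buffer-size convention, assuming YSize is the luma-plane byte count
 * (width * height): NV21/YUV420SP/YUV420P frames occupy YSize * 3 / 2 bytes,
 * so e.g. NV21TOYUV420P expects src.length == dst.length == YSize * 3 / 2,
 * while NV21TOARGB writes width * height ints into dst.
 */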
}

View File

@ -0,0 +1,499 @@
package me.lake.librestreaming.core;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.view.Surface;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import javax.microedition.khronos.egl.EGL10;
import me.lake.librestreaming.model.MediaCodecGLWapper;
import me.lake.librestreaming.model.OffScreenGLWapper;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.ScreenGLWapper;
import me.lake.librestreaming.tools.GLESTools;
public class GLHelper {
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
private static String VERTEXSHADER = "" +
"attribute vec4 aPosition;\n" +
"attribute vec2 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main(){\n" +
" gl_Position= aPosition;\n" +
" vTextureCoord = aTextureCoord;\n" +
"}";
private static final String VERTEXSHADER_CAMERA2D =
"attribute vec4 aPosition;\n" +
"attribute vec4 aTextureCoord;\n" +
"uniform mat4 uTextureMatrix;\n" +
"varying vec2 vTextureCoord;\n" +
"void main(){\n" +
" gl_Position= aPosition;\n" +
" vTextureCoord = (uTextureMatrix * aTextureCoord).xy;\n" +
"}";
private static String FRAGMENTSHADER_CAMERA = "" +
"#extension GL_OES_EGL_image_external : require\n" +
"precision highp float;\n" +
"varying highp vec2 vTextureCoord;\n" +
"uniform sampler2D uTexture;\n" +
"void main(){\n" +
" vec4 color = texture2D(uTexture, vTextureCoord);\n" +
" gl_FragColor = color;\n" +
"}";
private static String FRAGMENTSHADER_CAMERA2D = "" +
"#extension GL_OES_EGL_image_external : require\n" +
"precision highp float;\n" +
"varying highp vec2 vTextureCoord;\n" +
"uniform samplerExternalOES uTexture;\n" +
"void main(){\n" +
" vec4 color = texture2D(uTexture, vTextureCoord);\n" +
" gl_FragColor = color;\n" +
"}";
private static String FRAGMENTSHADER_2D = "" +
"precision highp float;\n" +
"varying highp vec2 vTextureCoord;\n" +
"uniform sampler2D uTexture;\n" +
"void main(){\n" +
" vec4 color = texture2D(uTexture, vTextureCoord);\n" +
" gl_FragColor = color;\n" +
"}";
private static short drawIndices[] = {0, 1, 2, 0, 2, 3};
private static float SquareVertices[] = {
-1.0f, 1.0f,
-1.0f, -1.0f,
1.0f, -1.0f,
1.0f, 1.0f};
private static float CamTextureVertices[] = {
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f};
private static float Cam2dTextureVertices[] = {
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f};
private static float Cam2dTextureVertices_90[] = {
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 1.0f};
private static float Cam2dTextureVertices_180[] = {
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 1.0f,
0.0f, 0.0f};
private static float Cam2dTextureVertices_270[] = {
1.0f, 1.0f,
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f};
public static float MediaCodecTextureVertices[] = {
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f};
//mirrored version
// public static float MediaCodecTextureVertices[] = {
// 1.0f, 1.0f,
// 1.0f, 0.0f,
// 0.0f, 0.0f,
// 0.0f, 1.0f};
private static float ScreenTextureVertices[] = {
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f};
// private static float ScreenTextureVertices[] = {
// 1.0f, 1.0f,
// 1.0f, 0.0f,
// 0.0f, 0.0f,
// 0.0f, 1.0f};
public static int FLOAT_SIZE_BYTES = 4;
public static int SHORT_SIZE_BYTES = 2;
public static int COORDS_PER_VERTEX = 2;
public static int TEXTURE_COORDS_PER_VERTEX = 2;
public static void initOffScreenGL(OffScreenGLWapper wapper) {
wapper.eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (EGL14.EGL_NO_DISPLAY == wapper.eglDisplay) {
throw new RuntimeException("eglGetDisplay,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int versions[] = new int[2];
if (!EGL14.eglInitialize(wapper.eglDisplay, versions, 0, versions, 1)) {
throw new RuntimeException("eglInitialize,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int configsCount[] = new int[1];
EGLConfig configs[] = new EGLConfig[1];
int configSpec[] = new int[]{
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_DEPTH_SIZE, 0,
EGL14.EGL_STENCIL_SIZE, 0,
EGL14.EGL_NONE
};
EGL14.eglChooseConfig(wapper.eglDisplay, configSpec, 0, configs, 0, 1, configsCount, 0);
if (configsCount[0] <= 0) {
throw new RuntimeException("eglChooseConfig,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
wapper.eglConfig = configs[0];
int[] surfaceAttribs = {
EGL10.EGL_WIDTH, 1,
EGL10.EGL_HEIGHT, 1,
EGL14.EGL_NONE
};
int contextSpec[] = new int[]{
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
wapper.eglContext = EGL14.eglCreateContext(wapper.eglDisplay, wapper.eglConfig, EGL14.EGL_NO_CONTEXT, contextSpec, 0);
if (EGL14.EGL_NO_CONTEXT == wapper.eglContext) {
throw new RuntimeException("eglCreateContext,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int[] values = new int[1];
EGL14.eglQueryContext(wapper.eglDisplay, wapper.eglContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, values, 0);
wapper.eglSurface = EGL14.eglCreatePbufferSurface(wapper.eglDisplay, wapper.eglConfig, surfaceAttribs, 0);
if (null == wapper.eglSurface || EGL14.EGL_NO_SURFACE == wapper.eglSurface) {
throw new RuntimeException("eglCreateWindowSurface,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
}
public static void initMediaCodecGL(MediaCodecGLWapper wapper, EGLContext sharedContext, Surface mediaInputSurface) {
wapper.eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (EGL14.EGL_NO_DISPLAY == wapper.eglDisplay) {
throw new RuntimeException("eglGetDisplay,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int versions[] = new int[2];
if (!EGL14.eglInitialize(wapper.eglDisplay, versions, 0, versions, 1)) {
throw new RuntimeException("eglInitialize,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int configsCount[] = new int[1];
EGLConfig configs[] = new EGLConfig[1];
int configSpec[] = new int[]{
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL_RECORDABLE_ANDROID, 1,
EGL14.EGL_DEPTH_SIZE, 0,
EGL14.EGL_STENCIL_SIZE, 0,
EGL14.EGL_NONE
};
EGL14.eglChooseConfig(wapper.eglDisplay, configSpec, 0, configs, 0, 1, configsCount, 0);
if (configsCount[0] <= 0) {
throw new RuntimeException("eglChooseConfig,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
wapper.eglConfig = configs[0];
int[] surfaceAttribs = {
EGL14.EGL_NONE
};
int contextSpec[] = new int[]{
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
wapper.eglContext = EGL14.eglCreateContext(wapper.eglDisplay, wapper.eglConfig, sharedContext, contextSpec, 0);
if (EGL14.EGL_NO_CONTEXT == wapper.eglContext) {
throw new RuntimeException("eglCreateContext,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int[] values = new int[1];
EGL14.eglQueryContext(wapper.eglDisplay, wapper.eglContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, values, 0);
wapper.eglSurface = EGL14.eglCreateWindowSurface(wapper.eglDisplay, wapper.eglConfig, mediaInputSurface, surfaceAttribs, 0);
if (null == wapper.eglSurface || EGL14.EGL_NO_SURFACE == wapper.eglSurface) {
throw new RuntimeException("eglCreateWindowSurface,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
}
public static void initScreenGL(ScreenGLWapper wapper, EGLContext sharedContext, SurfaceTexture screenSurface) {
wapper.eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (EGL14.EGL_NO_DISPLAY == wapper.eglDisplay) {
throw new RuntimeException("eglGetDisplay,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int versions[] = new int[2];
if (!EGL14.eglInitialize(wapper.eglDisplay, versions, 0, versions, 1)) {
throw new RuntimeException("eglInitialize,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int configsCount[] = new int[1];
EGLConfig configs[] = new EGLConfig[1];
int configSpec[] = new int[]{
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_DEPTH_SIZE, 0,
EGL14.EGL_STENCIL_SIZE, 0,
EGL14.EGL_NONE
};
EGL14.eglChooseConfig(wapper.eglDisplay, configSpec, 0, configs, 0, 1, configsCount, 0);
if (configsCount[0] <= 0) {
throw new RuntimeException("eglChooseConfig,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
wapper.eglConfig = configs[0];
int[] surfaceAttribs = {
EGL14.EGL_NONE
};
int contextSpec[] = new int[]{
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
wapper.eglContext = EGL14.eglCreateContext(wapper.eglDisplay, wapper.eglConfig, sharedContext, contextSpec, 0);
if (EGL14.EGL_NO_CONTEXT == wapper.eglContext) {
throw new RuntimeException("eglCreateContext,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int[] values = new int[1];
EGL14.eglQueryContext(wapper.eglDisplay, wapper.eglContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, values, 0);
wapper.eglSurface = EGL14.eglCreateWindowSurface(wapper.eglDisplay, wapper.eglConfig, screenSurface, surfaceAttribs, 0);
if (null == wapper.eglSurface || EGL14.EGL_NO_SURFACE == wapper.eglSurface) {
throw new RuntimeException("eglCreateWindowSurface,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
}
public static void makeCurrent(OffScreenGLWapper wapper) {
if (!EGL14.eglMakeCurrent(wapper.eglDisplay, wapper.eglSurface, wapper.eglSurface, wapper.eglContext)) {
throw new RuntimeException("eglMakeCurrent,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
}
public static void makeCurrent(MediaCodecGLWapper wapper) {
if (!EGL14.eglMakeCurrent(wapper.eglDisplay, wapper.eglSurface, wapper.eglSurface, wapper.eglContext)) {
throw new RuntimeException("eglMakeCurrent,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
}
public static void makeCurrent(ScreenGLWapper wapper) {
if (!EGL14.eglMakeCurrent(wapper.eglDisplay, wapper.eglSurface, wapper.eglSurface, wapper.eglContext)) {
throw new RuntimeException("eglMakeCurrent,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
}
public static void createCamFrameBuff(int[] frameBuffer, int[] frameBufferTex, int width, int height) {
GLES20.glGenFramebuffers(1, frameBuffer, 0);
GLES20.glGenTextures(1, frameBufferTex, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameBufferTex[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer[0]);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, frameBufferTex[0], 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLESTools.checkGlError("createCamFrameBuff");
}
public static void enableVertex(int posLoc, int texLoc, FloatBuffer shapeBuffer, FloatBuffer texBuffer) {
GLES20.glEnableVertexAttribArray(posLoc);
GLES20.glEnableVertexAttribArray(texLoc);
GLES20.glVertexAttribPointer(posLoc, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
COORDS_PER_VERTEX * 4, shapeBuffer);
GLES20.glVertexAttribPointer(texLoc, TEXTURE_COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
TEXTURE_COORDS_PER_VERTEX * 4, texBuffer);
}
public static void disableVertex(int posLoc, int texLoc) {
GLES20.glDisableVertexAttribArray(posLoc);
GLES20.glDisableVertexAttribArray(texLoc);
}
public static int createCamera2DProgram() {
return GLESTools.createProgram(VERTEXSHADER_CAMERA2D, FRAGMENTSHADER_CAMERA2D);
}
public static int createCameraProgram() {
return GLESTools.createProgram(VERTEXSHADER, FRAGMENTSHADER_CAMERA);
}
public static int createMediaCodecProgram() {
return GLESTools.createProgram(VERTEXSHADER, FRAGMENTSHADER_2D);
}
public static int createScreenProgram() {
return GLESTools.createProgram(VERTEXSHADER, FRAGMENTSHADER_2D);
}
public static ShortBuffer getDrawIndecesBuffer() {
ShortBuffer result = ByteBuffer.allocateDirect(SHORT_SIZE_BYTES * drawIndices.length).
order(ByteOrder.nativeOrder()).
asShortBuffer();
result.put(drawIndices);
result.position(0);
return result;
}
public static FloatBuffer getShapeVerticesBuffer() {
FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * SquareVertices.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
result.put(SquareVertices);
result.position(0);
return result;
}
public static FloatBuffer getMediaCodecTextureVerticesBuffer() {
FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * MediaCodecTextureVertices.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
result.put(MediaCodecTextureVertices);
result.position(0);
return result;
}
public static FloatBuffer getScreenTextureVerticesBuffer() {
FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * ScreenTextureVertices.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
result.put(ScreenTextureVertices);
result.position(0);
return result;
}
public static FloatBuffer getCamera2DTextureVerticesBuffer(final int directionFlag, final float cropRatio) {
if (directionFlag == -1) {
FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * Cam2dTextureVertices.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
result.put(CamTextureVertices);
result.position(0);
return result;
}
float[] buffer;
switch (directionFlag & 0xF0) {
case RESCoreParameters.FLAG_DIRECTION_ROATATION_90:
buffer = Cam2dTextureVertices_90.clone();
break;
case RESCoreParameters.FLAG_DIRECTION_ROATATION_180:
buffer = Cam2dTextureVertices_180.clone();
break;
case RESCoreParameters.FLAG_DIRECTION_ROATATION_270:
buffer = Cam2dTextureVertices_270.clone();
break;
default:
buffer = Cam2dTextureVertices.clone();
}
if ((directionFlag & 0xF0) == RESCoreParameters.FLAG_DIRECTION_ROATATION_0 || (directionFlag & 0xF0) == RESCoreParameters.FLAG_DIRECTION_ROATATION_180) {
if (cropRatio > 0) {
buffer[1] = buffer[1] == 1.0f ? (1.0f - cropRatio) : cropRatio;
buffer[3] = buffer[3] == 1.0f ? (1.0f - cropRatio) : cropRatio;
buffer[5] = buffer[5] == 1.0f ? (1.0f - cropRatio) : cropRatio;
buffer[7] = buffer[7] == 1.0f ? (1.0f - cropRatio) : cropRatio;
} else {
buffer[0] = buffer[0] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
buffer[2] = buffer[2] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
buffer[4] = buffer[4] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
buffer[6] = buffer[6] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
}
} else {
if (cropRatio > 0) {
buffer[0] = buffer[0] == 1.0f ? (1.0f - cropRatio) : cropRatio;
buffer[2] = buffer[2] == 1.0f ? (1.0f - cropRatio) : cropRatio;
buffer[4] = buffer[4] == 1.0f ? (1.0f - cropRatio) : cropRatio;
buffer[6] = buffer[6] == 1.0f ? (1.0f - cropRatio) : cropRatio;
} else {
buffer[1] = buffer[1] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
buffer[3] = buffer[3] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
buffer[5] = buffer[5] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
buffer[7] = buffer[7] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
}
}
if ((directionFlag & RESCoreParameters.FLAG_DIRECTION_FLIP_HORIZONTAL) != 0) {
buffer[0] = flip(buffer[0]);
buffer[2] = flip(buffer[2]);
buffer[4] = flip(buffer[4]);
buffer[6] = flip(buffer[6]);
}
if ((directionFlag & RESCoreParameters.FLAG_DIRECTION_FLIP_VERTICAL) != 0) {
buffer[1] = flip(buffer[1]);
buffer[3] = flip(buffer[3]);
buffer[5] = flip(buffer[5]);
buffer[7] = flip(buffer[7]);
}
FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * buffer.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
result.put(buffer);
result.position(0);
return result;
}
public static FloatBuffer getCameraTextureVerticesBuffer() {
FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * Cam2dTextureVertices.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
result.put(CamTextureVertices);
result.position(0);
return result;
}
private static float flip(final float i) {
return (1.0f - i);
}
public static FloatBuffer adjustTextureFlip(boolean flipHorizontal) {
float[] textureCords = getFlip(flipHorizontal, false);
FloatBuffer mTextureBuffer = ByteBuffer.allocateDirect(textureCords.length * 4)
.order(ByteOrder.nativeOrder())
.asFloatBuffer();
mTextureBuffer.clear();
mTextureBuffer.put(textureCords).position(0);
return mTextureBuffer;
}
public static float[] getFlip(final boolean flipHorizontal,
final boolean flipVertical) {
float[] rotatedTex = Cam2dTextureVertices;
if (flipHorizontal) {
rotatedTex = new float[]{
flip2(rotatedTex[0]), rotatedTex[1],
flip2(rotatedTex[2]), rotatedTex[3],
flip2(rotatedTex[4]), rotatedTex[5],
flip2(rotatedTex[6]), rotatedTex[7],
};
}
if (flipVertical) {
rotatedTex = new float[]{
rotatedTex[0], flip2(rotatedTex[1]),
rotatedTex[2], flip2(rotatedTex[3]),
rotatedTex[4], flip2(rotatedTex[5]),
rotatedTex[6], flip2(rotatedTex[7]),
};
}
return rotatedTex;
}
private static float flip2(final float i) {
if (i == 0.0f) {
return 1.0f;
}
return 0.0f;
}
}

View File

@ -0,0 +1,137 @@
package me.lake.librestreaming.core;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import java.io.IOException;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.tools.LogTools;
public class MediaCodecHelper {
public static MediaCodec createSoftVideoMediaCodec(RESCoreParameters coreParameters, MediaFormat videoFormat) {
videoFormat.setString(MediaFormat.KEY_MIME, "video/avc");
videoFormat.setInteger(MediaFormat.KEY_WIDTH, coreParameters.videoWidth);
videoFormat.setInteger(MediaFormat.KEY_HEIGHT, coreParameters.videoHeight);
videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, coreParameters.mediacdoecAVCBitRate);
videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, coreParameters.mediacodecAVCFrameRate);
videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, coreParameters.mediacodecAVCIFrameInterval);
videoFormat.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline);
videoFormat.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel31);
videoFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);
MediaCodec result = null;
try {
result = MediaCodec.createEncoderByType(videoFormat.getString(MediaFormat.KEY_MIME));
//select color
int[] colorful = result.getCodecInfo().getCapabilitiesForType(videoFormat.getString(MediaFormat.KEY_MIME)).colorFormats;
int dstVideoColorFormat = -1;
//select mediacodec colorformat
if (isArrayContain(colorful, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar)) {
dstVideoColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
coreParameters.mediacodecAVCColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
}
if (dstVideoColorFormat == -1 && isArrayContain(colorful, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar)) {
dstVideoColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
coreParameters.mediacodecAVCColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
}
if (dstVideoColorFormat == -1) {
LogTools.e("!!!!!!!!!!!UnSupport,mediaCodecColorFormat");
return null;
}
videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, dstVideoColorFormat);
//selectprofile
// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
// MediaCodecInfo.CodecProfileLevel[] profileLevels = result.getCodecInfo().getCapabilitiesForType(videoFormat.getString(MediaFormat.KEY_MIME)).profileLevels;
// if (isProfileContain(profileLevels, MediaCodecInfo.CodecProfileLevel.AVCProfileMain)) {
// coreParameters.mediacodecAVCProfile = MediaCodecInfo.CodecProfileLevel.AVCProfileMain;
// coreParameters.mediacodecAVClevel = MediaCodecInfo.CodecProfileLevel.AVCLevel31;
// } else {
// coreParameters.mediacodecAVCProfile = MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline;
// coreParameters.mediacodecAVClevel = MediaCodecInfo.CodecProfileLevel.AVCLevel31;
// }
// videoFormat.setInteger(MediaFormat.KEY_PROFILE, coreParameters.mediacodecAVCProfile);
// //level must be set even below M
// videoFormat.setInteger(MediaFormat.KEY_LEVEL, coreParameters.mediacodecAVClevel);
// }
} catch (IOException e) {
LogTools.trace(e);
return null;
}
return result;
}
public static MediaCodec createAudioMediaCodec(RESCoreParameters coreParameters, MediaFormat audioFormat) {
//Audio
MediaCodec result;
audioFormat.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, coreParameters.mediacodecAACProfile);
audioFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, coreParameters.mediacodecAACSampleRate);
audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, coreParameters.mediacodecAACChannelCount);
audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, coreParameters.mediacodecAACBitRate);
audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, coreParameters.mediacodecAACMaxInputSize);
LogTools.d("creatingAudioEncoder,format=" + audioFormat.toString());
try {
result = MediaCodec.createEncoderByType(audioFormat.getString(MediaFormat.KEY_MIME));
} catch (Exception e) {
LogTools.trace("can`t create audioEncoder!", e);
return null;
}
return result;
}
public static MediaCodec createHardVideoMediaCodec(RESCoreParameters coreParameters, MediaFormat videoFormat) {
videoFormat.setString(MediaFormat.KEY_MIME, "video/avc");
videoFormat.setInteger(MediaFormat.KEY_WIDTH, coreParameters.videoWidth);
videoFormat.setInteger(MediaFormat.KEY_HEIGHT, coreParameters.videoHeight);
videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, coreParameters.mediacdoecAVCBitRate);
videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, coreParameters.mediacodecAVCFrameRate);
videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, coreParameters.mediacodecAVCIFrameInterval);
videoFormat.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline);
videoFormat.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel31);
videoFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);
videoFormat.setInteger(MediaFormat.KEY_COMPLEXITY, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);//added by wangshuo
MediaCodec result = null;
try {
result = MediaCodec.createEncoderByType(videoFormat.getString(MediaFormat.KEY_MIME));
//selectprofile
// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
// MediaCodecInfo.CodecProfileLevel[] profileLevels = result.getCodecInfo().getCapabilitiesForType(videoFormat.getString(MediaFormat.KEY_MIME)).profileLevels;
// if (isProfileContain(profileLevels, MediaCodecInfo.CodecProfileLevel.AVCProfileMain)) {
// coreParameters.mediacodecAVCProfile = MediaCodecInfo.CodecProfileLevel.AVCProfileMain;
// coreParameters.mediacodecAVClevel = MediaCodecInfo.CodecProfileLevel.AVCLevel31;
// } else {
// coreParameters.mediacodecAVCProfile = MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline;
// coreParameters.mediacodecAVClevel = MediaCodecInfo.CodecProfileLevel.AVCLevel31;
// }
// videoFormat.setInteger(MediaFormat.KEY_PROFILE, coreParameters.mediacodecAVCProfile);
// //level must be set even below M
// videoFormat.setInteger(MediaFormat.KEY_LEVEL, coreParameters.mediacodecAVClevel);
// }
} catch (IOException e) {
LogTools.trace(e);
return null;
}
return result;
}
private static boolean isArrayContain(int[] src, int target) {
for (int color : src) {
if (color == target) {
return true;
}
}
return false;
}
private static boolean isProfileContain(MediaCodecInfo.CodecProfileLevel[] src, int target) {
for (MediaCodecInfo.CodecProfileLevel color : src) {
if (color.profile == target) {
return true;
}
}
return false;
}
}

View File

@ -0,0 +1,88 @@
package me.lake.librestreaming.core;
import android.media.MediaFormat;
import java.nio.ByteBuffer;
import me.lake.librestreaming.tools.ByteArrayTools;
public class Packager {
public static class H264Packager {
public static byte[] generateAVCDecoderConfigurationRecord(MediaFormat mediaFormat) {
ByteBuffer SPSByteBuff = mediaFormat.getByteBuffer("csd-0");
SPSByteBuff.position(4);
ByteBuffer PPSByteBuff = mediaFormat.getByteBuffer("csd-1");
PPSByteBuff.position(4);
int spslength = SPSByteBuff.remaining();
int ppslength = PPSByteBuff.remaining();
int length = 11 + spslength + ppslength;
byte[] result = new byte[length];
SPSByteBuff.get(result, 8, spslength);
PPSByteBuff.get(result, 8 + spslength + 3, ppslength);
/**
* UB[8]configurationVersion
* UB[8]AVCProfileIndication
* UB[8]profile_compatibility
* UB[8]AVCLevelIndication
* UB[8]lengthSizeMinusOne
*/
result[0] = 0x01;
result[1] = result[9];
result[2] = result[10];
result[3] = result[11];
result[4] = (byte) 0xFF;
/**
* UB[8]numOfSequenceParameterSets
* UB[16]sequenceParameterSetLength
*/
result[5] = (byte) 0xE1;
ByteArrayTools.intToByteArrayTwoByte(result, 6, spslength);
/**
* UB[8]numOfPictureParameterSets
* UB[16]pictureParameterSetLength
*/
int pos = 8 + spslength;
result[pos] = (byte) 0x01;
ByteArrayTools.intToByteArrayTwoByte(result, pos + 1, ppslength);
return result;
}
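/*
 * Worked layout example: with a 12-byte SPS and a 4-byte PPS the record is
 * 11 + 12 + 4 = 27 bytes:
 *   [0]      0x01 configurationVersion
 *   [1..3]   profile / compatibility / level, copied from the SPS
 *   [4]      0xFF lengthSizeMinusOne (4-byte NALU lengths)
 *   [5]      0xE1 numOfSequenceParameterSets
 *   [6..7]   spsLength = 12, [8..19] SPS bytes
 *   [20]     0x01 numOfPictureParameterSets
 *   [21..22] ppsLength = 4, [23..26] PPS bytes
 */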
}
public static class FLVPackager {
public static final int FLV_TAG_LENGTH = 11;
public static final int FLV_VIDEO_TAG_LENGTH = 5;
public static final int FLV_AUDIO_TAG_LENGTH = 2;
public static final int FLV_TAG_FOOTER_LENGTH = 4;
public static final int NALU_HEADER_LENGTH = 4;
public static void fillFlvVideoTag(byte[] dst, int pos, boolean isAVCSequenceHeader, boolean isIDR, int readDataLength) {
//FrameType&CodecID
dst[pos] = isIDR ? (byte) 0x17 : (byte) 0x27;
//AVCPacketType
dst[pos + 1] = isAVCSequenceHeader ? (byte) 0x00 : (byte) 0x01;
//LAKETODO CompositionTime
dst[pos + 2] = 0x00;
dst[pos + 3] = 0x00;
dst[pos + 4] = 0x00;
if (!isAVCSequenceHeader) {
//NALU HEADER
ByteArrayTools.intToByteArrayFull(dst, pos + 5, readDataLength);
}
}
public static void fillFlvAudioTag(byte[] dst, int pos, boolean isAACSequenceHeader) {
/**
* UB[4] 10=AAC
* UB[2] 3=44kHz
* UB[1] 1=16-bit
* UB[1] 0=MonoSound
*/
dst[pos] = (byte) 0xAE;
dst[pos + 1] = isAACSequenceHeader ? (byte) 0x00 : (byte) 0x01;
}
}
}

View File

@ -0,0 +1,57 @@
package me.lake.librestreaming.core;
import java.util.LinkedList;
public class RESByteSpeedometer {
private int timeGranularity;
private LinkedList<ByteFrame> byteList;
private final Object syncByteList = new Object();
public RESByteSpeedometer(int timeGranularity) {
this.timeGranularity = timeGranularity;
byteList = new LinkedList<>();
}
public int getSpeed() {
synchronized (syncByteList) {
long now = System.currentTimeMillis();
trim(now);
long sumByte = 0;
for (ByteFrame byteFrame : byteList) {
sumByte += byteFrame.bytenum;
}
return (int) (sumByte * 1000 / timeGranularity);
}
}
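/*
 * Worked example: with timeGranularity = 3000 ms and 600,000 bytes gained
 * inside the current window, getSpeed() returns 600000 * 1000 / 3000 =
 * 200,000 B/s.
 */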
public void gain(int byteCount) {
synchronized (syncByteList) {
long now = System.currentTimeMillis();
byteList.addLast(new ByteFrame(now, byteCount));
trim(now);
}
}
private void trim(long time) {
while (!byteList.isEmpty() && (time - byteList.getFirst().time) > timeGranularity) {
byteList.removeFirst();
}
}
public void reset() {
synchronized (syncByteList) {
byteList.clear();
}
}
private class ByteFrame {
long time;
long bytenum;
public ByteFrame(long time, long bytenum) {
this.time = time;
this.bytenum = bytenum;
}
}
}

View File

@ -0,0 +1,44 @@
package me.lake.librestreaming.core;
public class RESFrameRateMeter {
private static final long TIMETRAVEL = 1;
private static final long TIMETRAVEL_MS = TIMETRAVEL * 1000;
private static final long GET_TIMETRAVEL_MS = 2 * TIMETRAVEL_MS;
private int times;
private float lastFps;
private long lastUpdateTime;
public RESFrameRateMeter() {
times = 0;
lastFps = 0;
lastUpdateTime = 0;
}
public void count() {
long now = System.currentTimeMillis();
if (lastUpdateTime == 0) {
lastUpdateTime = now;
}
if ((now - lastUpdateTime) > TIMETRAVEL_MS) {
lastFps = (((float) times) / (now - lastUpdateTime)) * 1000.0f;
lastUpdateTime = now;
times = 0;
}
++times;
}
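/*
 * Worked example: if count() has been called 60 times and the 61st call
 * arrives 2032 ms after the window started, lastFps = 60 * 1000 / 2032 ≈ 29.5
 * and the 61st frame starts the new window.
 */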
public float getFps() {
if ((System.currentTimeMillis() - lastUpdateTime) > GET_TIMETRAVEL_MS) {
return 0;
} else {
return lastFps;
}
}
public void reSet() {
times = 0;
lastFps = 0;
lastUpdateTime = 0;
}
}

View File

@ -0,0 +1,891 @@
package me.lake.librestreaming.core;
import android.annotation.TargetApi;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.opengl.EGL14;
import android.opengl.EGLExt;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.SystemClock;
import android.view.Surface;
import java.nio.Buffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.nio.ShortBuffer;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import me.lake.librestreaming.client.CallbackDelivery;
import me.lake.librestreaming.core.listener.RESScreenShotListener;
import me.lake.librestreaming.core.listener.RESVideoChangeListener;
import me.lake.librestreaming.encoder.MediaVideoEncoder;
import me.lake.librestreaming.filter.hardvideofilter.BaseHardVideoFilter;
import me.lake.librestreaming.model.MediaCodecGLWapper;
import me.lake.librestreaming.model.OffScreenGLWapper;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.ScreenGLWapper;
import me.lake.librestreaming.model.Size;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.tools.LogTools;
public class RESHardVideoCore implements RESVideoCore {
RESCoreParameters resCoreParameters;
private final Object syncOp = new Object();
//filter
private Lock lockVideoFilter = null;
private BaseHardVideoFilter videoFilter;
private MediaCodec dstVideoEncoder;
private MediaFormat dstVideoFormat;
private final Object syncPreview = new Object();
private HandlerThread videoGLHandlerThread;
private VideoGLHandler videoGLHander;
final private Object syncResScreenShotListener = new Object();
private RESScreenShotListener resScreenShotListener;
final private Object syncResVideoChangeListener = new Object();
private RESVideoChangeListener resVideoChangeListener;
private final Object syncIsLooping = new Object();
private boolean isPreviewing = false;
private boolean isStreaming = false;
private int loopingInterval;
private boolean isEnableMirror;
private boolean isEnablePreviewMirror;
private boolean isEnableStreamMirror;
public RESHardVideoCore(RESCoreParameters parameters) {
resCoreParameters = parameters;
lockVideoFilter = new ReentrantLock(false);
}
public void onFrameAvailable() {
if (videoGLHander != null) {
videoGLHander.addFrameNum();
}
}
@Override
public boolean prepare(RESConfig resConfig) {
synchronized (syncOp) {
resCoreParameters.renderingMode = resConfig.getRenderingMode();
resCoreParameters.mediacdoecAVCBitRate = resConfig.getBitRate();
resCoreParameters.videoBufferQueueNum = resConfig.getVideoBufferQueueNum();
resCoreParameters.mediacodecAVCIFrameInterval = resConfig.getVideoGOP();
resCoreParameters.mediacodecAVCFrameRate = resCoreParameters.videoFPS;
loopingInterval = 1000 / resCoreParameters.videoFPS;
dstVideoFormat = new MediaFormat();
videoGLHandlerThread = new HandlerThread("GLThread");
videoGLHandlerThread.start();
videoGLHander = new VideoGLHandler(videoGLHandlerThread.getLooper());
videoGLHander.sendEmptyMessage(VideoGLHandler.WHAT_INIT);
return true;
}
}
@Override
public void startPreview(SurfaceTexture surfaceTexture, int visualWidth, int visualHeight) {
synchronized (syncOp) {
videoGLHander.sendMessage(videoGLHander.obtainMessage(VideoGLHandler.WHAT_START_PREVIEW,
visualWidth, visualHeight, surfaceTexture));
synchronized (syncIsLooping) {
if (!isPreviewing && !isStreaming) {
videoGLHander.removeMessages(VideoGLHandler.WHAT_DRAW);
videoGLHander.sendMessageDelayed(videoGLHander.obtainMessage(VideoGLHandler.WHAT_DRAW, SystemClock.uptimeMillis() + loopingInterval), loopingInterval);
}
isPreviewing = true;
}
}
}
@Override
public void updatePreview(int visualWidth, int visualHeight) {
synchronized (syncOp) {
synchronized (syncPreview) {
videoGLHander.updatePreview(visualWidth, visualHeight);
}
}
}
@Override
public void stopPreview(boolean releaseTexture) {
synchronized (syncOp) {
videoGLHander.sendMessage(videoGLHander.obtainMessage(VideoGLHandler.WHAT_STOP_PREVIEW, releaseTexture));
synchronized (syncIsLooping) {
isPreviewing = false;
}
}
}
@Override
public boolean startStreaming(RESFlvDataCollecter flvDataCollecter) {
synchronized (syncOp) {
videoGLHander.sendMessage(videoGLHander.obtainMessage(VideoGLHandler.WHAT_START_STREAMING, flvDataCollecter));
synchronized (syncIsLooping) {
if (!isPreviewing && !isStreaming) {
videoGLHander.removeMessages(VideoGLHandler.WHAT_DRAW);
videoGLHander.sendMessageDelayed(videoGLHander.obtainMessage(VideoGLHandler.WHAT_DRAW, SystemClock.uptimeMillis() + loopingInterval), loopingInterval);
}
isStreaming = true;
}
}
return true;
}
@Override
public void updateCamTexture(SurfaceTexture camTex) {
synchronized (syncOp) {
if (videoGLHander != null) {
videoGLHander.updateCamTexture(camTex);
}
}
}
@Override
public boolean stopStreaming() {
synchronized (syncOp) {
videoGLHander.sendEmptyMessage(VideoGLHandler.WHAT_STOP_STREAMING);
synchronized (syncIsLooping) {
isStreaming = false;
}
}
return true;
}
@Override
public boolean destroy() {
synchronized (syncOp) {
videoGLHander.sendEmptyMessage(VideoGLHandler.WHAT_UNINIT);
videoGLHandlerThread.quitSafely();
try {
videoGLHandlerThread.join();
} catch (InterruptedException ignored) {
}
videoGLHandlerThread = null;
videoGLHander = null;
return true;
}
}
@TargetApi(Build.VERSION_CODES.KITKAT)
@Override
public void reSetVideoBitrate(int bitrate) {
synchronized (syncOp) {
if (videoGLHander != null) {
videoGLHander.sendMessage(videoGLHander.obtainMessage(VideoGLHandler.WHAT_RESET_BITRATE, bitrate, 0));
resCoreParameters.mediacdoecAVCBitRate = bitrate;
dstVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, resCoreParameters.mediacdoecAVCBitRate);
}
}
}
@TargetApi(Build.VERSION_CODES.KITKAT)
@Override
public int getVideoBitrate() {
synchronized (syncOp) {
return resCoreParameters.mediacdoecAVCBitRate;
}
}
@Override
public void reSetVideoFPS(int fps) {
synchronized (syncOp) {
resCoreParameters.videoFPS = fps;
loopingInterval = 1000 / resCoreParameters.videoFPS;
}
}
@Override
public void reSetVideoSize(RESCoreParameters newParameters) {
synchronized (syncOp) {
synchronized (syncIsLooping) {
if (isPreviewing || isStreaming) {
videoGLHander.sendMessage(videoGLHander.obtainMessage(VideoGLHandler.WHAT_RESET_VIDEO, newParameters));
}
}
}
}
@Override
public void setCurrentCamera(int cameraIndex) {
mCameraId = cameraIndex;
synchronized (syncOp) {
if (videoGLHander != null) {
videoGLHander.updateCameraIndex(cameraIndex);
}
}
}
public BaseHardVideoFilter acquireVideoFilter() {
lockVideoFilter.lock();
return videoFilter;
}
public void releaseVideoFilter() {
lockVideoFilter.unlock();
}
public void setVideoFilter(BaseHardVideoFilter baseHardVideoFilter) {
lockVideoFilter.lock();
videoFilter = baseHardVideoFilter;
lockVideoFilter.unlock();
}
@Override
public void takeScreenShot(RESScreenShotListener listener) {
synchronized (syncResScreenShotListener) {
resScreenShotListener = listener;
}
}
public void setMirror(boolean isEnableMirror,boolean isEnablePreviewMirror,boolean isEnableStreamMirror) {
this.isEnableMirror = isEnableMirror;
this.isEnablePreviewMirror = isEnablePreviewMirror;
this.isEnableStreamMirror = isEnableStreamMirror;
}
@Override
public void setVideoChangeListener(RESVideoChangeListener listener) {
synchronized (syncResVideoChangeListener) {
resVideoChangeListener = listener;
}
}
@Override
public float getDrawFrameRate() {
synchronized (syncOp) {
return videoGLHander == null ? 0 : videoGLHander.getDrawFrameRate();
}
}
private class VideoGLHandler extends Handler {
static final int WHAT_INIT = 0x001;
static final int WHAT_UNINIT = 0x002;
static final int WHAT_FRAME = 0x003;
static final int WHAT_DRAW = 0x004;
static final int WHAT_RESET_VIDEO = 0x005;
static final int WHAT_START_PREVIEW = 0x010;
static final int WHAT_STOP_PREVIEW = 0x020;
static final int WHAT_START_STREAMING = 0x100;
static final int WHAT_STOP_STREAMING = 0x200;
static final int WHAT_RESET_BITRATE = 0x300;
private Size screenSize;
//=========================
public static final int FILTER_LOCK_TOLERATION = 3;//3ms
private final Object syncFrameNum = new Object();
private int frameNum = 0;
//gl stuff
private final Object syncCameraTex = new Object();
private SurfaceTexture cameraTexture;
private SurfaceTexture screenTexture;
private MediaCodecGLWapper mediaCodecGLWapper;
private ScreenGLWapper screenGLWapper;
private OffScreenGLWapper offScreenGLWapper;
private int sample2DFrameBuffer;
private int sample2DFrameBufferTexture;
private int frameBuffer;
private int frameBufferTexture;
private FloatBuffer shapeVerticesBuffer;
private FloatBuffer mediaCodecTextureVerticesBuffer;
private FloatBuffer screenTextureVerticesBuffer;
private int currCamera;
private final Object syncCameraTextureVerticesBuffer = new Object();
private FloatBuffer camera2dTextureVerticesBuffer;
private FloatBuffer cameraTextureVerticesBuffer;
private ShortBuffer drawIndecesBuffer;
private BaseHardVideoFilter innerVideoFilter = null;
private RESFrameRateMeter drawFrameRateMeter;
private int directionFlag;
//sender
private VideoSenderThread videoSenderThread;
boolean hasNewFrame = false;
public boolean dropNextFrame = false;
float[] textureMatrix;
public VideoGLHandler(Looper looper) {
super(looper);
screenGLWapper = null;
mediaCodecGLWapper = null;
drawFrameRateMeter = new RESFrameRateMeter();
screenSize = new Size(1, 1);
initBuffer();
}
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case WHAT_FRAME: {
GLHelper.makeCurrent(offScreenGLWapper);
synchronized (syncFrameNum) {
synchronized (syncCameraTex) {
if (cameraTexture != null) {
while (frameNum != 0) {
cameraTexture.updateTexImage();
--frameNum;
if (!dropNextFrame) {
hasNewFrame = true;
} else {
dropNextFrame = false;
hasNewFrame=false;
}
}
} else {
break;
}
}
}
drawSample2DFrameBuffer(cameraTexture);
}
break;
case WHAT_DRAW: {
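//msg.obj carries the uptime this draw was scheduled for; the next
//WHAT_DRAW is re-posted below so frames land loopingInterval ms apart
//even when drawing overruns its slot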
long time = (Long) msg.obj;
long interval = time + loopingInterval - SystemClock.uptimeMillis();
synchronized (syncIsLooping) {
if (isPreviewing || isStreaming) {
if (interval > 0) {
videoGLHander.sendMessageDelayed(videoGLHander.obtainMessage(
VideoGLHandler.WHAT_DRAW,
SystemClock.uptimeMillis() + interval),
interval);
} else {
videoGLHander.sendMessage(videoGLHander.obtainMessage(
VideoGLHandler.WHAT_DRAW,
SystemClock.uptimeMillis() + loopingInterval));
}
}
}
if (hasNewFrame) {
drawFrameBuffer();
drawMediaCodec(time * 1000000);
drawScreen();
encoderMp4(frameBufferTexture);//feed the composed frame to the MP4 recorder
drawFrameRateMeter.count();
hasNewFrame = false;
}
}
break;
case WHAT_INIT: {
initOffScreenGL();
}
break;
case WHAT_UNINIT: {
lockVideoFilter.lock();
if (innerVideoFilter != null) {
innerVideoFilter.onDestroy();
innerVideoFilter = null;
}
lockVideoFilter.unlock();
uninitOffScreenGL();
}
break;
case WHAT_START_PREVIEW: {
initScreenGL((SurfaceTexture) msg.obj);
updatePreview(msg.arg1, msg.arg2);
}
break;
case WHAT_STOP_PREVIEW: {
uninitScreenGL();
boolean releaseTexture = (boolean) msg.obj;
if (releaseTexture) {
screenTexture.release();
screenTexture = null;
}
}
break;
case WHAT_START_STREAMING: {
if (dstVideoEncoder == null) {
dstVideoEncoder = MediaCodecHelper.createHardVideoMediaCodec(resCoreParameters, dstVideoFormat);
if (dstVideoEncoder == null) {
throw new RuntimeException("create Video MediaCodec failed");
}
}
dstVideoEncoder.configure(dstVideoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
initMediaCodecGL(dstVideoEncoder.createInputSurface());
dstVideoEncoder.start();
videoSenderThread = new VideoSenderThread("VideoSenderThread", dstVideoEncoder, (RESFlvDataCollecter) msg.obj);
videoSenderThread.start();
}
break;
case WHAT_STOP_STREAMING: {
videoSenderThread.quit();
try {
videoSenderThread.join();
} catch (InterruptedException e) {
LogTools.trace("RESHardVideoCore,stopStreaming()failed", e);
}
videoSenderThread = null;
uninitMediaCodecGL();
dstVideoEncoder.stop();
dstVideoEncoder.release();
dstVideoEncoder = null;
}
break;
case WHAT_RESET_BITRATE: {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT && mediaCodecGLWapper != null) {
Bundle bitrateBundle = new Bundle();
bitrateBundle.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, msg.arg1);
dstVideoEncoder.setParameters(bitrateBundle);
}
}
break;
case WHAT_RESET_VIDEO: {
RESCoreParameters newParameters = (RESCoreParameters) msg.obj;
resCoreParameters.videoWidth = newParameters.videoWidth;
resCoreParameters.videoHeight = newParameters.videoHeight;
resCoreParameters.cropRatio = newParameters.cropRatio;
updateCameraIndex(currCamera);
resetFrameBuff();
if (mediaCodecGLWapper != null) {
uninitMediaCodecGL();
dstVideoEncoder.stop();
dstVideoEncoder.release();
dstVideoEncoder = MediaCodecHelper.createHardVideoMediaCodec(resCoreParameters, dstVideoFormat);
if (dstVideoEncoder == null) {
throw new RuntimeException("create Video MediaCodec failed");
}
dstVideoEncoder.configure(dstVideoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
initMediaCodecGL(dstVideoEncoder.createInputSurface());
dstVideoEncoder.start();
videoSenderThread.updateMediaCodec(dstVideoEncoder);
}
synchronized (syncResVideoChangeListener) {
if(resVideoChangeListener!=null) {
CallbackDelivery.i().post(new RESVideoChangeListener.RESVideoChangeRunable(resVideoChangeListener,
resCoreParameters.videoWidth,
resCoreParameters.videoHeight));
}
}
}
break;
default:
}
}
private void drawSample2DFrameBuffer(SurfaceTexture cameraTexture) {
if(isEnableMirror){
screenTextureVerticesBuffer = GLHelper.adjustTextureFlip(isEnablePreviewMirror);
mediaCodecTextureVerticesBuffer = GLHelper.adjustTextureFlip(isEnableStreamMirror);
}
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, sample2DFrameBuffer);
GLES20.glUseProgram(offScreenGLWapper.cam2dProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, OVERWATCH_TEXTURE_ID);
GLES20.glUniform1i(offScreenGLWapper.cam2dTextureLoc, 0);
synchronized (syncCameraTextureVerticesBuffer) {
GLHelper.enableVertex(offScreenGLWapper.cam2dPostionLoc, offScreenGLWapper.cam2dTextureCoordLoc,
shapeVerticesBuffer, camera2dTextureVerticesBuffer);
}
textureMatrix = new float[16];
cameraTexture.getTransformMatrix(textureMatrix);
//encoder mp4 start
//processStMatrix(textureMatrix, mCameraID == Camera.CameraInfo.CAMERA_FACING_FRONT);
//encoder mp4 end
GLES20.glUniformMatrix4fv(offScreenGLWapper.cam2dTextureMatrix, 1, false, textureMatrix, 0);
GLES20.glViewport(0, 0, resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);//resCoreParameters.videoWidth, resCoreParameters.videoHeight
doGLDraw();
GLES20.glFinish();
GLHelper.disableVertex(offScreenGLWapper.cam2dPostionLoc, offScreenGLWapper.cam2dTextureCoordLoc);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
GLES20.glUseProgram(0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
private void drawOriginFrameBuffer() {
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer);
GLES20.glUseProgram(offScreenGLWapper.camProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, sample2DFrameBufferTexture);
GLES20.glUniform1i(offScreenGLWapper.camTextureLoc, 0);
synchronized (syncCameraTextureVerticesBuffer) {
GLHelper.enableVertex(offScreenGLWapper.camPostionLoc, offScreenGLWapper.camTextureCoordLoc,
shapeVerticesBuffer, cameraTextureVerticesBuffer);
}
GLES20.glViewport(0, 0, resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);
doGLDraw();
GLES20.glFinish();
GLHelper.disableVertex(offScreenGLWapper.camPostionLoc, offScreenGLWapper.camTextureCoordLoc);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glUseProgram(0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
private void drawFrameBuffer() {
GLHelper.makeCurrent(offScreenGLWapper);
boolean isFilterLocked = lockVideoFilter();
long starttime = System.currentTimeMillis();
if (isFilterLocked) {
if (videoFilter != innerVideoFilter) {
if (innerVideoFilter != null) {
innerVideoFilter.onDestroy();
}
innerVideoFilter = videoFilter;
if (innerVideoFilter != null) {
innerVideoFilter.onInit(resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);//resCoreParameters.videoWidth, resCoreParameters.videoHeight
}
}
if (innerVideoFilter != null) {
synchronized (syncCameraTextureVerticesBuffer) {
innerVideoFilter.onDirectionUpdate(directionFlag);
innerVideoFilter.onDraw(sample2DFrameBufferTexture, frameBuffer, shapeVerticesBuffer, cameraTextureVerticesBuffer);
}
} else {
drawOriginFrameBuffer();
}
unlockVideoFilter();
} else {
drawOriginFrameBuffer();
}
LogTools.e("滤镜耗时:"+(System.currentTimeMillis()-starttime));
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer);
checkScreenShot();
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
private void drawMediaCodec(long currTime) {
if (mediaCodecGLWapper != null) {
GLHelper.makeCurrent(mediaCodecGLWapper);
GLES20.glUseProgram(mediaCodecGLWapper.drawProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameBufferTexture);
GLES20.glUniform1i(mediaCodecGLWapper.drawTextureLoc, 0);
GLHelper.enableVertex(mediaCodecGLWapper.drawPostionLoc, mediaCodecGLWapper.drawTextureCoordLoc,
shapeVerticesBuffer, mediaCodecTextureVerticesBuffer);
doGLDraw();
GLES20.glFinish();
GLHelper.disableVertex(mediaCodecGLWapper.drawPostionLoc, mediaCodecGLWapper.drawTextureCoordLoc);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glUseProgram(0);
EGLExt.eglPresentationTimeANDROID(mediaCodecGLWapper.eglDisplay, mediaCodecGLWapper.eglSurface, currTime);
if (!EGL14.eglSwapBuffers(mediaCodecGLWapper.eglDisplay, mediaCodecGLWapper.eglSurface)) {
throw new RuntimeException("eglSwapBuffers,failed!");
}
}
}
private void drawScreen() {
if (screenGLWapper != null) {
GLHelper.makeCurrent(screenGLWapper);
GLES20.glUseProgram(screenGLWapper.drawProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameBufferTexture);
GLES20.glUniform1i(screenGLWapper.drawTextureLoc, 0);
GLHelper.enableVertex(screenGLWapper.drawPostionLoc, screenGLWapper.drawTextureCoordLoc,
shapeVerticesBuffer, screenTextureVerticesBuffer);
GLES20.glViewport(0, 0, screenSize.getWidth(), screenSize.getHeight());
doGLDraw();
GLES20.glFinish();
GLHelper.disableVertex(screenGLWapper.drawPostionLoc, screenGLWapper.drawTextureCoordLoc);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glUseProgram(0);
if (!EGL14.eglSwapBuffers(screenGLWapper.eglDisplay, screenGLWapper.eglSurface)) {
throw new RuntimeException("eglSwapBuffers,failed!");
}
}
}
private void doGLDraw() {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer);
}
/**
* @return true if the filter lock was acquired within FILTER_LOCK_TOLERATION ms
*/
private boolean lockVideoFilter() {
try {
return lockVideoFilter.tryLock(FILTER_LOCK_TOLERATION, TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
return false;
}
}
private void unlockVideoFilter() {
lockVideoFilter.unlock();
}
private void checkScreenShot() {
synchronized (syncResScreenShotListener) {
if (resScreenShotListener != null) {
Bitmap result = null;
try {
IntBuffer pixBuffer = IntBuffer.allocate(resCoreParameters.previewVideoHeight * resCoreParameters.previewVideoWidth);
GLES20.glReadPixels(0, 0, resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixBuffer);
int[] glPixel = pixBuffer.array();
int[] argbPixel = new int[resCoreParameters.previewVideoHeight * resCoreParameters.previewVideoWidth];
ColorHelper.FIXGLPIXEL(glPixel, argbPixel,
resCoreParameters.previewVideoHeight,
resCoreParameters.previewVideoWidth
);
result = Bitmap.createBitmap(argbPixel,
resCoreParameters.previewVideoHeight,
resCoreParameters.previewVideoWidth,
Bitmap.Config.ARGB_8888);
if(isEnableMirror && isEnablePreviewMirror){
Matrix mx = new Matrix();
mx.setScale(-1, 1); //mirror the bitmap horizontally
result = Bitmap.createBitmap(result,0,0,result.getWidth(),result.getHeight(),mx,true);
}
System.out.println("resCoreParameters.previewVideoWidth = " + resCoreParameters.previewVideoWidth);
System.out.println("resCoreParameters.previewVideoHeight = " + resCoreParameters.previewVideoHeight);
} catch (Exception e) {
LogTools.trace("takescreenshot failed:", e);
} finally {
CallbackDelivery.i().post(new RESScreenShotListener.RESScreenShotListenerRunable(resScreenShotListener, result));
resScreenShotListener = null;
}
}
}
}
private void initOffScreenGL() {
if (offScreenGLWapper == null) {
offScreenGLWapper = new OffScreenGLWapper();
GLHelper.initOffScreenGL(offScreenGLWapper);
GLHelper.makeCurrent(offScreenGLWapper);
//camera
offScreenGLWapper.camProgram = GLHelper.createCameraProgram();
GLES20.glUseProgram(offScreenGLWapper.camProgram);
offScreenGLWapper.camTextureLoc = GLES20.glGetUniformLocation(offScreenGLWapper.camProgram, "uTexture");
offScreenGLWapper.camPostionLoc = GLES20.glGetAttribLocation(offScreenGLWapper.camProgram, "aPosition");
offScreenGLWapper.camTextureCoordLoc = GLES20.glGetAttribLocation(offScreenGLWapper.camProgram, "aTextureCoord");
//camera2d
offScreenGLWapper.cam2dProgram = GLHelper.createCamera2DProgram();
GLES20.glUseProgram(offScreenGLWapper.cam2dProgram);
offScreenGLWapper.cam2dTextureLoc = GLES20.glGetUniformLocation(offScreenGLWapper.cam2dProgram, "uTexture");
offScreenGLWapper.cam2dPostionLoc = GLES20.glGetAttribLocation(offScreenGLWapper.cam2dProgram, "aPosition");
offScreenGLWapper.cam2dTextureCoordLoc = GLES20.glGetAttribLocation(offScreenGLWapper.cam2dProgram, "aTextureCoord");
offScreenGLWapper.cam2dTextureMatrix = GLES20.glGetUniformLocation(offScreenGLWapper.cam2dProgram, "uTextureMatrix");
int[] fb = new int[1], fbt = new int[1];
GLHelper.createCamFrameBuff(fb, fbt, resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);//resCoreParameters.videoWidth, resCoreParameters.videoHeight
sample2DFrameBuffer = fb[0];
sample2DFrameBufferTexture = fbt[0];
GLHelper.createCamFrameBuff(fb, fbt, resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);//resCoreParameters.videoWidth, resCoreParameters.videoHeight
frameBuffer = fb[0];
frameBufferTexture = fbt[0];
} else {
throw new IllegalStateException("initOffScreenGL without uninitOffScreenGL");
}
}
private void uninitOffScreenGL() {
if (offScreenGLWapper != null) {
GLHelper.makeCurrent(offScreenGLWapper);
GLES20.glDeleteProgram(offScreenGLWapper.camProgram);
GLES20.glDeleteProgram(offScreenGLWapper.cam2dProgram);
GLES20.glDeleteFramebuffers(1, new int[]{frameBuffer}, 0);
GLES20.glDeleteTextures(1, new int[]{frameBufferTexture}, 0);
GLES20.glDeleteFramebuffers(1, new int[]{sample2DFrameBuffer}, 0);
GLES20.glDeleteTextures(1, new int[]{sample2DFrameBufferTexture}, 0);
EGL14.eglDestroySurface(offScreenGLWapper.eglDisplay, offScreenGLWapper.eglSurface);
EGL14.eglDestroyContext(offScreenGLWapper.eglDisplay, offScreenGLWapper.eglContext);
EGL14.eglTerminate(offScreenGLWapper.eglDisplay);
EGL14.eglMakeCurrent(offScreenGLWapper.eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
} else {
throw new IllegalStateException("uninitOffScreenGL without initOffScreenGL");
}
}
private void initScreenGL(SurfaceTexture screenSurfaceTexture) {
if (screenGLWapper == null) {
screenTexture = screenSurfaceTexture;
screenGLWapper = new ScreenGLWapper();
GLHelper.initScreenGL(screenGLWapper, offScreenGLWapper.eglContext, screenSurfaceTexture);
GLHelper.makeCurrent(screenGLWapper);
screenGLWapper.drawProgram = GLHelper.createScreenProgram();
GLES20.glUseProgram(screenGLWapper.drawProgram);
screenGLWapper.drawTextureLoc = GLES20.glGetUniformLocation(screenGLWapper.drawProgram, "uTexture");
screenGLWapper.drawPostionLoc = GLES20.glGetAttribLocation(screenGLWapper.drawProgram, "aPosition");
screenGLWapper.drawTextureCoordLoc = GLES20.glGetAttribLocation(screenGLWapper.drawProgram, "aTextureCoord");
} else {
throw new IllegalStateException("initScreenGL without unInitScreenGL");
}
}
private void uninitScreenGL() {
if (screenGLWapper != null) {
GLHelper.makeCurrent(screenGLWapper);
GLES20.glDeleteProgram(screenGLWapper.drawProgram);
EGL14.eglDestroySurface(screenGLWapper.eglDisplay, screenGLWapper.eglSurface);
EGL14.eglDestroyContext(screenGLWapper.eglDisplay, screenGLWapper.eglContext);
EGL14.eglTerminate(screenGLWapper.eglDisplay);
EGL14.eglMakeCurrent(screenGLWapper.eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
screenGLWapper = null;
} else {
throw new IllegalStateException("unInitScreenGL without initScreenGL");
}
}
private void initMediaCodecGL(Surface mediacodecSurface) {
if (mediaCodecGLWapper == null) {
mediaCodecGLWapper = new MediaCodecGLWapper();
GLHelper.initMediaCodecGL(mediaCodecGLWapper, offScreenGLWapper.eglContext, mediacodecSurface);
GLHelper.makeCurrent(mediaCodecGLWapper);
GLES20.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
mediaCodecGLWapper.drawProgram = GLHelper.createMediaCodecProgram();
GLES20.glUseProgram(mediaCodecGLWapper.drawProgram);
mediaCodecGLWapper.drawTextureLoc = GLES20.glGetUniformLocation(mediaCodecGLWapper.drawProgram, "uTexture");
mediaCodecGLWapper.drawPostionLoc = GLES20.glGetAttribLocation(mediaCodecGLWapper.drawProgram, "aPosition");
mediaCodecGLWapper.drawTextureCoordLoc = GLES20.glGetAttribLocation(mediaCodecGLWapper.drawProgram, "aTextureCoord");
} else {
throw new IllegalStateException("initMediaCodecGL without uninitMediaCodecGL");
}
}
private void uninitMediaCodecGL() {
if (mediaCodecGLWapper != null) {
GLHelper.makeCurrent(mediaCodecGLWapper);
GLES20.glDeleteProgram(mediaCodecGLWapper.drawProgram);
EGL14.eglDestroySurface(mediaCodecGLWapper.eglDisplay, mediaCodecGLWapper.eglSurface);
EGL14.eglDestroyContext(mediaCodecGLWapper.eglDisplay, mediaCodecGLWapper.eglContext);
EGL14.eglTerminate(mediaCodecGLWapper.eglDisplay);
EGL14.eglMakeCurrent(mediaCodecGLWapper.eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
mediaCodecGLWapper = null;
} else {
throw new IllegalStateException("uninitMediaCodecGL without initMediaCodecGL");
}
}
private void resetFrameBuff() {
GLHelper.makeCurrent(offScreenGLWapper);
GLES20.glDeleteFramebuffers(1, new int[]{frameBuffer}, 0);
GLES20.glDeleteTextures(1, new int[]{frameBufferTexture}, 0);
GLES20.glDeleteFramebuffers(1, new int[]{sample2DFrameBuffer}, 0);
GLES20.glDeleteTextures(1, new int[]{sample2DFrameBufferTexture}, 0);
int[] fb = new int[1], fbt = new int[1];
GLHelper.createCamFrameBuff(fb, fbt, resCoreParameters.videoWidth, resCoreParameters.videoHeight);
sample2DFrameBuffer = fb[0];
sample2DFrameBufferTexture = fbt[0];
GLHelper.createCamFrameBuff(fb, fbt, resCoreParameters.videoWidth, resCoreParameters.videoHeight);
frameBuffer = fb[0];
frameBufferTexture = fbt[0];
}
private void initBuffer() {
shapeVerticesBuffer = GLHelper.getShapeVerticesBuffer();
mediaCodecTextureVerticesBuffer = GLHelper.getMediaCodecTextureVerticesBuffer();
screenTextureVerticesBuffer = GLHelper.getScreenTextureVerticesBuffer();
updateCameraIndex(currCamera);
drawIndecesBuffer = GLHelper.getDrawIndecesBuffer();
cameraTextureVerticesBuffer = GLHelper.getCameraTextureVerticesBuffer();
}
public void updateCameraIndex(int cameraIndex) {
synchronized (syncCameraTextureVerticesBuffer) {
currCamera = cameraIndex;
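//the front camera delivers mirrored frames, so a horizontal flip is folded
//into its direction mode; the back camera keeps its configured rotation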
if (currCamera == Camera.CameraInfo.CAMERA_FACING_FRONT) {
directionFlag = resCoreParameters.frontCameraDirectionMode ^ RESConfig.DirectionMode.FLAG_DIRECTION_FLIP_HORIZONTAL;
} else {
directionFlag = resCoreParameters.backCameraDirectionMode ^ RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_0;
}
camera2dTextureVerticesBuffer = GLHelper.getCamera2DTextureVerticesBuffer(directionFlag, resCoreParameters.cropRatio);
}
}
public float getDrawFrameRate() {
return drawFrameRateMeter.getFps();
}
public void updateCamTexture(SurfaceTexture surfaceTexture) {
synchronized (syncCameraTex) {
if (surfaceTexture != cameraTexture) {
cameraTexture = surfaceTexture;
frameNum = 0;
dropNextFrame = true;
}
}
}
public void addFrameNum() {
synchronized (syncFrameNum) {
++frameNum;
this.removeMessages(WHAT_FRAME);
this.sendMessageAtFrontOfQueue(this.obtainMessage(VideoGLHandler.WHAT_FRAME));
}
}
public void updatePreview(int w, int h) {
screenSize = new Size(w, h);
}
public int getBufferTexture(){
return frameBufferTexture;
}
private void encoderMp4(int BufferTexture) {
synchronized (RESHardVideoCore.this) {//use the same monitor as setVideoEncoder
if (mVideoEncoder != null) {
processStMatrix(textureMatrix, mCameraId == Camera.CameraInfo.CAMERA_FACING_FRONT);
if (mNeedResetEglContext) {
mVideoEncoder.setEglContext(EGL14.eglGetCurrentContext(), videoGLHander.getBufferTexture());
mNeedResetEglContext = false;
}
mVideoEncoder.setPreviewWH(resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);
mVideoEncoder.frameAvailableSoon(textureMatrix, mVideoEncoder.getMvpMatrix());
}
}
}
}
//encoder mp4 start
private MediaVideoEncoder mVideoEncoder;
private boolean mNeedResetEglContext = true;
private int mCameraId = -1;
public void setVideoEncoder(final MediaVideoEncoder encoder) {
synchronized (this) {
if (encoder != null) {
encoder.setEglContext(EGL14.eglGetCurrentContext(), videoGLHander.getBufferTexture());
}
mVideoEncoder = encoder;
}
}
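//mirrors the SurfaceTexture transform horizontally: negating the x column
//flips the sampling direction, and toggling the x translation between 0 and
//1 keeps coordinates inside [0,1] texture space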
private void processStMatrix(float[] matrix, boolean needMirror) {
if (needMirror && matrix != null && matrix.length == 16) {
for (int i = 0; i < 3; i++) {
matrix[4 * i] = -matrix[4 * i];
}
if (matrix[4 * 3] == 0) {
matrix[4 * 3] = 1.0f;
} else if (matrix[4 * 3] == 1.0f) {
matrix[4 * 3] = 0f;
}
}
}
public void setNeedResetEglContext(boolean bol){
mNeedResetEglContext = bol;
}
//encoder mp4 end
}
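//Usage sketch (illustrative, not part of the commit): the filter lock above
//implies an acquire/release contract - pair acquireVideoFilter() with
//releaseVideoFilter() even when the returned filter is null, or the GL
//thread will wait FILTER_LOCK_TOLERATION ms per frame and fall back to the
//unfiltered path:
//BaseHardVideoFilter filter = hardVideoCore.acquireVideoFilter();//hardVideoCore: a hypothetical instance
//try {
//    if (filter != null) {
//        //adjust the live filter here
//    }
//} finally {
//    hardVideoCore.releaseVideoFilter();
//}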

View File

@ -0,0 +1,232 @@
package me.lake.librestreaming.core;
import android.media.AudioFormat;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.SystemClock;
import java.nio.ByteBuffer;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import me.lake.librestreaming.filter.softaudiofilter.BaseSoftAudioFilter;
import me.lake.librestreaming.model.RESAudioBuff;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.tools.LogTools;
public class RESSoftAudioCore {
RESCoreParameters resCoreParameters;
private final Object syncOp = new Object();
private MediaCodec dstAudioEncoder;
private MediaFormat dstAudioFormat;
//filter
private Lock lockAudioFilter = null;
private BaseSoftAudioFilter audioFilter;
//AudioBuffs
//buffers to handle buff from queueAudio
private RESAudioBuff[] orignAudioBuffs;
private int lastAudioQueueBuffIndex;
//buffer to handle buff from orignAudioBuffs
private RESAudioBuff orignAudioBuff;
private RESAudioBuff filteredAudioBuff;
private AudioFilterHandler audioFilterHandler;
private HandlerThread audioFilterHandlerThread;
private AudioSenderThread audioSenderThread;
public RESSoftAudioCore(RESCoreParameters parameters) {
resCoreParameters = parameters;
lockAudioFilter = new ReentrantLock(false);
}
public void queueAudio(byte[] rawAudioFrame) {
int targetIndex = (lastAudioQueueBuffIndex + 1) % orignAudioBuffs.length;
if (orignAudioBuffs[targetIndex].isReadyToFill) {
LogTools.d("queueAudio,accept ,targetIndex" + targetIndex);
System.arraycopy(rawAudioFrame, 0, orignAudioBuffs[targetIndex].buff, 0, resCoreParameters.audioRecoderBufferSize);
orignAudioBuffs[targetIndex].isReadyToFill = false;
lastAudioQueueBuffIndex = targetIndex;
audioFilterHandler.sendMessage(audioFilterHandler.obtainMessage(AudioFilterHandler.WHAT_INCOMING_BUFF, targetIndex, 0));
} else {
LogTools.d("queueAudio,abandon,targetIndex" + targetIndex);
}
}
public boolean prepare(RESConfig resConfig) {
synchronized (syncOp) {
resCoreParameters.mediacodecAACProfile = MediaCodecInfo.CodecProfileLevel.AACObjectLC;
resCoreParameters.mediacodecAACSampleRate = 44100;
resCoreParameters.mediacodecAACChannelCount = 1;
resCoreParameters.mediacodecAACBitRate = 32 * 1024;
resCoreParameters.mediacodecAACMaxInputSize = 8820;
dstAudioFormat = new MediaFormat();
dstAudioEncoder = MediaCodecHelper.createAudioMediaCodec(resCoreParameters, dstAudioFormat);
if (dstAudioEncoder == null) {
LogTools.e("create Audio MediaCodec failed");
return false;
}
//audio
//each buff holds 100ms of 16-bit mono PCM: 44100/10 = 4410 samples, 4410*2 = 8820 bytes
int audioQueueNum = resCoreParameters.audioBufferQueueNum;
int orignAudioBuffSize = resCoreParameters.mediacodecAACSampleRate / 5;
orignAudioBuffs = new RESAudioBuff[audioQueueNum];
for (int i = 0; i < audioQueueNum; i++) {
orignAudioBuffs[i] = new RESAudioBuff(AudioFormat.ENCODING_PCM_16BIT, orignAudioBuffSize);
}
orignAudioBuff = new RESAudioBuff(AudioFormat.ENCODING_PCM_16BIT, orignAudioBuffSize);
filteredAudioBuff = new RESAudioBuff(AudioFormat.ENCODING_PCM_16BIT, orignAudioBuffSize);
return true;
}
}
public void start(RESFlvDataCollecter flvDataCollecter) {
synchronized (syncOp) {
try {
for (RESAudioBuff buff : orignAudioBuffs) {
buff.isReadyToFill = true;
}
if (dstAudioEncoder == null) {
dstAudioEncoder = MediaCodec.createEncoderByType(dstAudioFormat.getString(MediaFormat.KEY_MIME));
}
dstAudioEncoder.configure(dstAudioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
dstAudioEncoder.start();
lastAudioQueueBuffIndex = 0;
audioFilterHandlerThread = new HandlerThread("audioFilterHandlerThread");
audioSenderThread = new AudioSenderThread("AudioSenderThread", dstAudioEncoder, flvDataCollecter);
audioFilterHandlerThread.start();
audioSenderThread.start();
audioFilterHandler = new AudioFilterHandler(audioFilterHandlerThread.getLooper());
} catch (Exception e) {
LogTools.trace("RESSoftAudioCore", e);
}
}
}
public void stop() {
synchronized (syncOp) {
audioFilterHandler.removeCallbacksAndMessages(null);
audioFilterHandlerThread.quit();
try {
audioFilterHandlerThread.join();
audioSenderThread.quit();
audioSenderThread.join();
} catch (InterruptedException e) {
LogTools.trace("RESSoftAudioCore", e);
}
dstAudioEncoder.stop();
dstAudioEncoder.release();
dstAudioEncoder = null;
}
}
public BaseSoftAudioFilter acquireAudioFilter() {
lockAudioFilter.lock();
return audioFilter;
}
public void releaseAudioFilter() {
lockAudioFilter.unlock();
}
public void setAudioFilter(BaseSoftAudioFilter baseSoftAudioFilter) {
lockAudioFilter.lock();
if (audioFilter != null) {
audioFilter.onDestroy();
}
audioFilter = baseSoftAudioFilter;
if (audioFilter != null) {
audioFilter.onInit(resCoreParameters.mediacodecAACSampleRate / 5);
}
lockAudioFilter.unlock();
}
public void destroy() {
synchronized (syncOp) {
lockAudioFilter.lock();
if (audioFilter != null) {
audioFilter.onDestroy();
}
lockAudioFilter.unlock();
}
}
private class AudioFilterHandler extends Handler {
public static final int FILTER_LOCK_TOLERATION = 3;//3ms
public static final int WHAT_INCOMING_BUFF = 1;
private int sequenceNum;
AudioFilterHandler(Looper looper) {
super(looper);
sequenceNum = 0;
}
@Override
public void handleMessage(Message msg) {
if (msg.what != WHAT_INCOMING_BUFF) {
return;
}
sequenceNum++;
int targetIndex = msg.arg1;
long nowTimeMs = SystemClock.uptimeMillis();
System.arraycopy(orignAudioBuffs[targetIndex].buff, 0,
orignAudioBuff.buff, 0, orignAudioBuff.buff.length);
orignAudioBuffs[targetIndex].isReadyToFill = true;
boolean isFilterLocked = lockAudioFilter();
boolean filtered = false;
if (isFilterLocked) {
filtered = audioFilter.onFrame(orignAudioBuff.buff, filteredAudioBuff.buff, nowTimeMs, sequenceNum);
unlockAudioFilter();
}
//no else branch needed: the copy above already moved the frame into
//orignAudioBuff and released the queue slot
//orignAudioBuff is ready
int eibIndex = dstAudioEncoder.dequeueInputBuffer(-1);
if (eibIndex >= 0) {
ByteBuffer dstAudioEncoderIBuffer = dstAudioEncoder.getInputBuffers()[eibIndex];
dstAudioEncoderIBuffer.position(0);
dstAudioEncoderIBuffer.put(filtered?filteredAudioBuff.buff:orignAudioBuff.buff, 0, orignAudioBuff.buff.length);
dstAudioEncoder.queueInputBuffer(eibIndex, 0, orignAudioBuff.buff.length, nowTimeMs * 1000, 0);
} else {
LogTools.d("dstAudioEncoder.dequeueInputBuffer(-1)<0");
}
LogTools.d("AudioFilterHandler,ProcessTime:" + (System.currentTimeMillis() - nowTimeMs));
}
/**
* @return true if the filter lock was acquired and audioFilter != null
*/
private boolean lockAudioFilter() {
try {
boolean locked = lockAudioFilter.tryLock(FILTER_LOCK_TOLERATION, TimeUnit.MILLISECONDS);
if (locked) {
if (audioFilter != null) {
return true;
} else {
lockAudioFilter.unlock();
return false;
}
} else {
return false;
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();//restore the interrupt flag
}
return false;
}
private void unlockAudioFilter() {
lockAudioFilter.unlock();
}
}
}
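//A minimal filter sketch (assumptions: BaseSoftAudioFilter declares the
//onInit/onFrame/onDestroy callbacks invoked above, and returning true from
//onFrame makes the core encode the filtered buffer instead of the original):
//class HalfGainAudioFilter extends BaseSoftAudioFilter {
//    @Override
//    public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentTimeMs, int sequenceNum) {
//        for (int i = 0; i < orignBuff.length; i += 2) {//16-bit little-endian PCM samples
//            short sample = (short) ((orignBuff[i] & 0xFF) | (orignBuff[i + 1] << 8));
//            sample = (short) (sample / 2);//halve the amplitude
//            targetBuff[i] = (byte) (sample & 0xFF);
//            targetBuff[i + 1] = (byte) ((sample >> 8) & 0xFF);
//        }
//        return true;
//    }
//}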

View File

@ -0,0 +1,555 @@
package me.lake.librestreaming.core;
import android.annotation.TargetApi;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.SystemClock;
import java.nio.ByteBuffer;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import me.lake.librestreaming.client.CallbackDelivery;
import me.lake.librestreaming.core.listener.RESScreenShotListener;
import me.lake.librestreaming.core.listener.RESVideoChangeListener;
import me.lake.librestreaming.encoder.MediaVideoEncoder;
import me.lake.librestreaming.filter.softvideofilter.BaseSoftVideoFilter;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.RESVideoBuff;
import me.lake.librestreaming.render.GLESRender;
import me.lake.librestreaming.render.IRender;
import me.lake.librestreaming.render.NativeRender;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.tools.BuffSizeCalculator;
import me.lake.librestreaming.tools.LogTools;
public class RESSoftVideoCore implements RESVideoCore {
RESCoreParameters resCoreParameters;
private final Object syncOp = new Object();
private SurfaceTexture cameraTexture;
private int currentCamera;
private MediaCodec dstVideoEncoder;
private boolean isEncoderStarted;
private final Object syncDstVideoEncoder = new Object();
private MediaFormat dstVideoFormat;
//render
private final Object syncPreview = new Object();
private IRender previewRender;
//filter
private Lock lockVideoFilter = null;
private BaseSoftVideoFilter videoFilter;
private VideoFilterHandler videoFilterHandler;
private HandlerThread videoFilterHandlerThread;
//sender
private VideoSenderThread videoSenderThread;
//VideoBuffs
//buffers to handle buff from queueVideo
private RESVideoBuff[] orignVideoBuffs;
private int lastVideoQueueBuffIndex;
//buffer to convert orignVideoBuff to NV21 if filter are set
private RESVideoBuff orignNV21VideoBuff;
//buffer to handle filtered color from filter if filter are set
private RESVideoBuff filteredNV21VideoBuff;
//buffer to convert other color format to suitable color format for dstVideoEncoder if nessesary
private RESVideoBuff suitable4VideoEncoderBuff;
final private Object syncResScreenShotListener = new Object();
private RESScreenShotListener resScreenShotListener;
private final Object syncIsLooping = new Object();
private boolean isPreviewing = false;
private boolean isStreaming = false;
private int loopingInterval;
public RESSoftVideoCore(RESCoreParameters parameters) {
resCoreParameters = parameters;
lockVideoFilter = new ReentrantLock(false);
videoFilter = null;
}
public void setCurrentCamera(int camIndex) {
if (currentCamera != camIndex) {
synchronized (syncOp) {
if (videoFilterHandler != null) {
videoFilterHandler.removeMessages(VideoFilterHandler.WHAT_INCOMING_BUFF);
}
if (orignVideoBuffs != null) {
for (RESVideoBuff buff : orignVideoBuffs) {
buff.isReadyToFill = true;
}
lastVideoQueueBuffIndex = 0;
}
}
}
currentCamera = camIndex;
}
@Override
public boolean prepare(RESConfig resConfig) {
synchronized (syncOp) {
resCoreParameters.renderingMode = resConfig.getRenderingMode();
resCoreParameters.mediacdoecAVCBitRate = resConfig.getBitRate();
resCoreParameters.videoBufferQueueNum = resConfig.getVideoBufferQueueNum();
resCoreParameters.mediacodecAVCIFrameInterval = resConfig.getVideoGOP();
resCoreParameters.mediacodecAVCFrameRate = resCoreParameters.videoFPS;
loopingInterval = 1000 / resCoreParameters.videoFPS;
dstVideoFormat = new MediaFormat();
synchronized (syncDstVideoEncoder) {
dstVideoEncoder = MediaCodecHelper.createSoftVideoMediaCodec(resCoreParameters, dstVideoFormat);
isEncoderStarted = false;
if (dstVideoEncoder == null) {
LogTools.e("create Video MediaCodec failed");
return false;
}
}
resCoreParameters.previewBufferSize = BuffSizeCalculator.calculator(resCoreParameters.videoWidth,
resCoreParameters.videoHeight, resCoreParameters.previewColorFormat);
//video
int videoWidth = resCoreParameters.videoWidth;
int videoHeight = resCoreParameters.videoHeight;
int videoQueueNum = resCoreParameters.videoBufferQueueNum;
orignVideoBuffs = new RESVideoBuff[videoQueueNum];
for (int i = 0; i < videoQueueNum; i++) {
orignVideoBuffs[i] = new RESVideoBuff(resCoreParameters.previewColorFormat, resCoreParameters.previewBufferSize);
}
lastVideoQueueBuffIndex = 0;
orignNV21VideoBuff = new RESVideoBuff(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
BuffSizeCalculator.calculator(videoWidth, videoHeight, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar));
filteredNV21VideoBuff = new RESVideoBuff(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
BuffSizeCalculator.calculator(videoWidth, videoHeight, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar));
suitable4VideoEncoderBuff = new RESVideoBuff(resCoreParameters.mediacodecAVCColorFormat,
BuffSizeCalculator.calculator(videoWidth, videoHeight, resCoreParameters.mediacodecAVCColorFormat));
videoFilterHandlerThread = new HandlerThread("videoFilterHandlerThread");
videoFilterHandlerThread.start();
videoFilterHandler = new VideoFilterHandler(videoFilterHandlerThread.getLooper());
return true;
}
}
@Override
public boolean startStreaming(RESFlvDataCollecter flvDataCollecter) {
synchronized (syncOp) {
try {
synchronized (syncDstVideoEncoder) {
if (dstVideoEncoder == null) {
dstVideoEncoder = MediaCodec.createEncoderByType(dstVideoFormat.getString(MediaFormat.KEY_MIME));
}
dstVideoEncoder.configure(dstVideoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
dstVideoEncoder.start();
isEncoderStarted = true;
}
videoSenderThread = new VideoSenderThread("VideoSenderThread", dstVideoEncoder, flvDataCollecter);
videoSenderThread.start();
synchronized (syncIsLooping) {
if (!isPreviewing && !isStreaming) {
videoFilterHandler.removeMessages(VideoFilterHandler.WHAT_DRAW);
videoFilterHandler.sendMessageDelayed(videoFilterHandler.obtainMessage(VideoFilterHandler.WHAT_DRAW, SystemClock.uptimeMillis() + loopingInterval), loopingInterval);
}
isStreaming = true;
}
} catch (Exception e) {
LogTools.trace("RESVideoClient.start()failed", e);
return false;
}
return true;
}
}
@Override
public void updateCamTexture(SurfaceTexture camTex) {
}
@Override
public boolean stopStreaming() {
synchronized (syncOp) {
videoSenderThread.quit();
synchronized (syncIsLooping) {
isStreaming = false;
}
try {
videoSenderThread.join();
} catch (InterruptedException e) {
LogTools.trace("RESCore", e);
}
synchronized (syncDstVideoEncoder) {
dstVideoEncoder.stop();
dstVideoEncoder.release();
dstVideoEncoder = null;
isEncoderStarted = false;
}
videoSenderThread = null;
return true;
}
}
@Override
public boolean destroy() {
synchronized (syncOp) {
lockVideoFilter.lock();
if (videoFilter != null) {
videoFilter.onDestroy();
}
lockVideoFilter.unlock();
return true;
}
}
@TargetApi(Build.VERSION_CODES.KITKAT)
@Override
public void reSetVideoBitrate(int bitrate) {
synchronized (syncOp) {
if (videoFilterHandler != null) {
videoFilterHandler.sendMessage(videoFilterHandler.obtainMessage(VideoFilterHandler.WHAT_RESET_BITRATE, bitrate, 0));
resCoreParameters.mediacdoecAVCBitRate = bitrate;
dstVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, resCoreParameters.mediacdoecAVCBitRate);
}
}
}
@TargetApi(Build.VERSION_CODES.KITKAT)
@Override
public int getVideoBitrate() {
synchronized (syncOp) {
return resCoreParameters.mediacdoecAVCBitRate;
}
}
@Override
public void reSetVideoFPS(int fps) {
synchronized (syncOp) {
resCoreParameters.videoFPS = fps;
loopingInterval = 1000 / resCoreParameters.videoFPS;
}
}
@Override
public void reSetVideoSize(RESCoreParameters newParameters) {
}
@Override
public void startPreview(SurfaceTexture surfaceTexture, int visualWidth, int visualHeight) {
synchronized (syncPreview) {
if (previewRender != null) {
throw new RuntimeException("startPreview without destroy previous");
}
switch (resCoreParameters.renderingMode) {
case RESCoreParameters.RENDERING_MODE_NATIVE_WINDOW:
previewRender = new NativeRender();
break;
case RESCoreParameters.RENDERING_MODE_OPENGLES:
previewRender = new GLESRender();
break;
default:
throw new RuntimeException("Unknow rendering mode");
}
previewRender.create(surfaceTexture,
resCoreParameters.previewColorFormat,
resCoreParameters.videoWidth,
resCoreParameters.videoHeight,
visualWidth,
visualHeight);
synchronized (syncIsLooping) {
if (!isPreviewing && !isStreaming) {
videoFilterHandler.removeMessages(VideoFilterHandler.WHAT_DRAW);
videoFilterHandler.sendMessageDelayed(videoFilterHandler.obtainMessage(VideoFilterHandler.WHAT_DRAW, SystemClock.uptimeMillis() + loopingInterval), loopingInterval);
}
isPreviewing = true;
}
}
}
@Override
public void updatePreview(int visualWidth, int visualHeight) {
synchronized (syncPreview) {
if (previewRender == null) {
throw new RuntimeException("updatePreview without startPreview");
}
previewRender.update(visualWidth, visualHeight);
}
}
@Override
public void stopPreview(boolean releaseTexture) {
synchronized (syncPreview) {
if (previewRender == null) {
throw new RuntimeException("stopPreview without startPreview");
}
previewRender.destroy(releaseTexture);
previewRender = null;
synchronized (syncIsLooping) {
isPreviewing = false;
}
}
}
public void queueVideo(byte[] rawVideoFrame) {
synchronized (syncOp) {
int targetIndex = (lastVideoQueueBuffIndex + 1) % orignVideoBuffs.length;
if (orignVideoBuffs[targetIndex].isReadyToFill) {
LogTools.d("queueVideo,accept ,targetIndex" + targetIndex);
acceptVideo(rawVideoFrame, orignVideoBuffs[targetIndex].buff);
orignVideoBuffs[targetIndex].isReadyToFill = false;
lastVideoQueueBuffIndex = targetIndex;
videoFilterHandler.sendMessage(videoFilterHandler.obtainMessage(VideoFilterHandler.WHAT_INCOMING_BUFF, targetIndex, 0));
} else {
LogTools.d("queueVideo,abandon,targetIndex" + targetIndex);
}
}
}
private void acceptVideo(byte[] src, byte[] dst) {
int directionFlag = currentCamera == Camera.CameraInfo.CAMERA_FACING_BACK ? resCoreParameters.backCameraDirectionMode : resCoreParameters.frontCameraDirectionMode;
ColorHelper.NV21Transform(src,
dst,
resCoreParameters.previewVideoWidth,
resCoreParameters.previewVideoHeight,
directionFlag);
}
public BaseSoftVideoFilter acquireVideoFilter() {
lockVideoFilter.lock();
return videoFilter;
}
public void releaseVideoFilter() {
lockVideoFilter.unlock();
}
public void setVideoFilter(BaseSoftVideoFilter baseSoftVideoFilter) {
lockVideoFilter.lock();
if (videoFilter != null) {
videoFilter.onDestroy();
}
videoFilter = baseSoftVideoFilter;
if (videoFilter != null) {
videoFilter.onInit(resCoreParameters.videoWidth, resCoreParameters.videoHeight);
}
lockVideoFilter.unlock();
}
@Override
public void takeScreenShot(RESScreenShotListener listener) {
synchronized (syncResScreenShotListener) {
resScreenShotListener = listener;
}
}
@Override
public void setVideoChangeListener(RESVideoChangeListener listener) {
}
@Override
public float getDrawFrameRate() {
synchronized (syncOp) {
return videoFilterHandler == null ? 0 : videoFilterHandler.getDrawFrameRate();
}
}
//worker handler
private class VideoFilterHandler extends Handler {
public static final int FILTER_LOCK_TOLERATION = 3;//3ms
public static final int WHAT_INCOMING_BUFF = 1;
public static final int WHAT_DRAW = 2;
public static final int WHAT_RESET_BITRATE = 3;
private int sequenceNum;
private RESFrameRateMeter drawFrameRateMeter;
VideoFilterHandler(Looper looper) {
super(looper);
sequenceNum = 0;
drawFrameRateMeter = new RESFrameRateMeter();
}
public float getDrawFrameRate() {
return drawFrameRateMeter.getFps();
}
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case WHAT_INCOMING_BUFF: {
int targetIndex = msg.arg1;
/**
* orignVideoBuffs[targetIndex] is ready
* orignVideoBuffs[targetIndex]->orignNV21VideoBuff
*/
System.arraycopy(orignVideoBuffs[targetIndex].buff, 0,
orignNV21VideoBuff.buff, 0, orignNV21VideoBuff.buff.length);
orignVideoBuffs[targetIndex].isReadyToFill = true;
}
break;
case WHAT_DRAW: {
long time = (Long) msg.obj;
long interval = time + loopingInterval - SystemClock.uptimeMillis();
synchronized (syncIsLooping) {
if (isPreviewing || isStreaming) {
if (interval > 0) {
videoFilterHandler.sendMessageDelayed(videoFilterHandler.obtainMessage(
VideoFilterHandler.WHAT_DRAW,
SystemClock.uptimeMillis() + interval),
interval);
} else {
videoFilterHandler.sendMessage(videoFilterHandler.obtainMessage(
VideoFilterHandler.WHAT_DRAW,
SystemClock.uptimeMillis() + loopingInterval));
}
}
}
sequenceNum++;
long nowTimeMs = SystemClock.uptimeMillis();
boolean isFilterLocked = lockVideoFilter();
if (isFilterLocked) {
boolean modified;
modified = videoFilter.onFrame(orignNV21VideoBuff.buff, filteredNV21VideoBuff.buff, nowTimeMs, sequenceNum);
unlockVideoFilter();
rendering(modified ? filteredNV21VideoBuff.buff : orignNV21VideoBuff.buff);
checkScreenShot(modified ? filteredNV21VideoBuff.buff : orignNV21VideoBuff.buff);
/**
* orignNV21VideoBuff is ready
* orignNV21VideoBuff->suitable4VideoEncoderBuff
*/
if (resCoreParameters.mediacodecAVCColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
ColorHelper.NV21TOYUV420SP(modified ? filteredNV21VideoBuff.buff : orignNV21VideoBuff.buff,
suitable4VideoEncoderBuff.buff, resCoreParameters.videoWidth * resCoreParameters.videoHeight);
} else if (resCoreParameters.mediacodecAVCColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
ColorHelper.NV21TOYUV420P(modified ? filteredNV21VideoBuff.buff : orignNV21VideoBuff.buff,
suitable4VideoEncoderBuff.buff, resCoreParameters.videoWidth * resCoreParameters.videoHeight);
} else {//LAKETODO colorConvert
}
} else {
rendering(orignNV21VideoBuff.buff);
checkScreenShot(orignNV21VideoBuff.buff);
if (resCoreParameters.mediacodecAVCColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
ColorHelper.NV21TOYUV420SP(orignNV21VideoBuff.buff,
suitable4VideoEncoderBuff.buff,
resCoreParameters.videoWidth * resCoreParameters.videoHeight);
} else if (resCoreParameters.mediacodecAVCColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
ColorHelper.NV21TOYUV420P(orignNV21VideoBuff.buff,
suitable4VideoEncoderBuff.buff,
resCoreParameters.videoWidth * resCoreParameters.videoHeight);
}
orignNV21VideoBuff.isReadyToFill = true;
}
drawFrameRateMeter.count();
//suitable4VideoEncoderBuff is ready
synchronized (syncDstVideoEncoder) {
if (dstVideoEncoder != null && isEncoderStarted) {
int eibIndex = dstVideoEncoder.dequeueInputBuffer(-1);
if (eibIndex >= 0) {
ByteBuffer dstVideoEncoderIBuffer = dstVideoEncoder.getInputBuffers()[eibIndex];
dstVideoEncoderIBuffer.position(0);
dstVideoEncoderIBuffer.put(suitable4VideoEncoderBuff.buff, 0, suitable4VideoEncoderBuff.buff.length);
dstVideoEncoder.queueInputBuffer(eibIndex, 0, suitable4VideoEncoderBuff.buff.length, nowTimeMs * 1000, 0);
} else {
LogTools.d("dstVideoEncoder.dequeueInputBuffer(-1)<0");
}
}
}
LogTools.d("VideoFilterHandler,ProcessTime:" + (System.currentTimeMillis() - nowTimeMs));
}
break;
case WHAT_RESET_BITRATE: {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT && dstVideoEncoder != null) {
Bundle bitrateBundle = new Bundle();
bitrateBundle.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, msg.arg1);
dstVideoEncoder.setParameters(bitrateBundle);
}
}
break;
}
}
/**
* rendering nv21 using native window
*
* @param pixel
*/
private void rendering(byte[] pixel) {
synchronized (syncPreview) {
if (previewRender == null) {
return;
}
previewRender.rendering(pixel);
}
}
/**
* check if screenshotlistener exist
*
* @param pixel
*/
private void checkScreenShot(byte[] pixel) {
synchronized (syncResScreenShotListener) {
if (resScreenShotListener != null) {
int[] argbPixel = new int[resCoreParameters.videoWidth * resCoreParameters.videoHeight];
ColorHelper.NV21TOARGB(pixel,
argbPixel,
resCoreParameters.videoWidth,
resCoreParameters.videoHeight);
Bitmap result = Bitmap.createBitmap(argbPixel,
resCoreParameters.videoWidth,
resCoreParameters.videoHeight,
Bitmap.Config.ARGB_8888);
CallbackDelivery.i().post(new RESScreenShotListener.RESScreenShotListenerRunable(resScreenShotListener, result));
resScreenShotListener = null;
}
}
}
/**
* @return true if the filter lock was acquired and videoFilter != null
*/
private boolean lockVideoFilter() {
try {
boolean locked = lockVideoFilter.tryLock(FILTER_LOCK_TOLERATION, TimeUnit.MILLISECONDS);
if (locked) {
if (videoFilter != null) {
return true;
} else {
lockVideoFilter.unlock();
return false;
}
} else {
return false;
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();//restore the interrupt flag
}
return false;
}
private void unlockVideoFilter() {
lockVideoFilter.unlock();
}
}
public void setVideoEncoder(final MediaVideoEncoder encoder) {
}
@Override
public void setMirror(boolean isEnableMirror, boolean isEnablePreviewMirror, boolean isEnableStreamMirror) {
}
public void setNeedResetEglContext(boolean bol){
}
}
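//A minimal filter sketch (assumptions: BaseSoftVideoFilter declares the
//onInit/onFrame/onDestroy callbacks invoked above; buffers are NV21, i.e.
//width*height luma bytes followed by interleaved VU chroma):
//class GrayscaleSoftVideoFilter extends BaseSoftVideoFilter {
//    private int sizeY;
//    @Override
//    public void onInit(int videoWidth, int videoHeight) {
//        sizeY = videoWidth * videoHeight;
//    }
//    @Override
//    public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentTimeMs, int sequenceNum) {
//        System.arraycopy(orignBuff, 0, targetBuff, 0, sizeY);//keep luma
//        java.util.Arrays.fill(targetBuff, sizeY, targetBuff.length, (byte) 128);//neutral chroma -> grayscale
//        return true;//encode targetBuff instead of the original
//    }
//}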

View File

@ -0,0 +1,53 @@
package me.lake.librestreaming.core;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import me.lake.librestreaming.core.listener.RESScreenShotListener;
import me.lake.librestreaming.core.listener.RESVideoChangeListener;
import me.lake.librestreaming.encoder.MediaVideoEncoder;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
public interface RESVideoCore {
int OVERWATCH_TEXTURE_ID = 10;
boolean prepare(RESConfig resConfig);
void updateCamTexture(SurfaceTexture camTex);
void startPreview(SurfaceTexture surfaceTexture, int visualWidth, int visualHeight);
void updatePreview(int visualWidth, int visualHeight);
void stopPreview(boolean releaseTexture);
boolean startStreaming(RESFlvDataCollecter flvDataCollecter);
boolean stopStreaming();
boolean destroy();
void reSetVideoBitrate(int bitrate);
int getVideoBitrate();
void reSetVideoFPS(int fps);
void reSetVideoSize(RESCoreParameters newParameters);
void setCurrentCamera(int cameraIndex);
void takeScreenShot(RESScreenShotListener listener);
void setVideoChangeListener(RESVideoChangeListener listener);
float getDrawFrameRate();
void setVideoEncoder(final MediaVideoEncoder encoder);
void setMirror(boolean isEnableMirror,boolean isEnablePreviewMirror,boolean isEnableStreamMirror);
void setNeedResetEglContext(boolean bol);
}
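//Call-order sketch (illustrative): the contract both implementations above
//follow - prepare() first, preview and streaming start/stop independently,
//destroy() last:
//RESVideoCore core = new RESHardVideoCore(coreParameters);//or RESSoftVideoCore
//core.prepare(resConfig);
//core.startPreview(surfaceTexture, visualWidth, visualHeight);
//core.startStreaming(flvDataCollecter);
//...
//core.stopStreaming();
//core.stopPreview(true);
//core.destroy();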

View File

@ -0,0 +1,138 @@
package me.lake.librestreaming.core;
import android.media.MediaCodec;
import android.media.MediaFormat;
import java.nio.ByteBuffer;
import me.lake.librestreaming.rtmp.RESFlvData;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.rtmp.RESRtmpSender;
import me.lake.librestreaming.tools.LogTools;
public class VideoSenderThread extends Thread {
private static final long WAIT_TIME = 5000;
private MediaCodec.BufferInfo eInfo;
private long startTime = 0;
private MediaCodec dstVideoEncoder;
private final Object syncDstVideoEncoder = new Object();
private RESFlvDataCollecter dataCollecter;
VideoSenderThread(String name, MediaCodec encoder, RESFlvDataCollecter flvDataCollecter) {
super(name);
eInfo = new MediaCodec.BufferInfo();
startTime = 0;
dstVideoEncoder = encoder;
dataCollecter = flvDataCollecter;
}
public void updateMediaCodec(MediaCodec encoder) {
synchronized (syncDstVideoEncoder) {
dstVideoEncoder = encoder;
}
}
private boolean shouldQuit = false;
void quit() {
shouldQuit = true;
this.interrupt();
}
@Override
public void run() {
while (!shouldQuit) {
synchronized (syncDstVideoEncoder) {
int eobIndex = MediaCodec.INFO_TRY_AGAIN_LATER;
try {
eobIndex = dstVideoEncoder.dequeueOutputBuffer(eInfo, WAIT_TIME);
} catch (Exception ignored) {
}
switch (eobIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
LogTools.d("VideoSenderThread,MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED");
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
// LogTools.d("VideoSenderThread,MediaCodec.INFO_TRY_AGAIN_LATER");
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
LogTools.d("VideoSenderThread,MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:" +
dstVideoEncoder.getOutputFormat().toString());
sendAVCDecoderConfigurationRecord(0, dstVideoEncoder.getOutputFormat());
break;
default:
LogTools.d("VideoSenderThread,MediaCode,eobIndex=" + eobIndex);
if (startTime == 0) {
startTime = eInfo.presentationTimeUs / 1000;
}
/**
* SPS/PPS were already sent when INFO_OUTPUT_FORMAT_CHANGED fired,
* so buffers flagged BUFFER_FLAG_CODEC_CONFIG are skipped here
*/
if ((eInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0 && eInfo.size != 0) {
ByteBuffer realData = dstVideoEncoder.getOutputBuffers()[eobIndex];
realData.position(eInfo.offset + 4);//skip the 4-byte Annex-B start code
realData.limit(eInfo.offset + eInfo.size);
sendRealData((eInfo.presentationTimeUs / 1000) - startTime, realData);
}
dstVideoEncoder.releaseOutputBuffer(eobIndex, false);
break;
}
}
try {
sleep(5);
} catch (InterruptedException ignored) {
}
}
eInfo = null;
}
private void sendAVCDecoderConfigurationRecord(long tms, MediaFormat format) {
byte[] AVCDecoderConfigurationRecord = Packager.H264Packager.generateAVCDecoderConfigurationRecord(format);
int packetLen = Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH +
AVCDecoderConfigurationRecord.length;
byte[] finalBuff = new byte[packetLen];
Packager.FLVPackager.fillFlvVideoTag(finalBuff,
0,
true,
true,
AVCDecoderConfigurationRecord.length);
System.arraycopy(AVCDecoderConfigurationRecord, 0,
finalBuff, Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH, AVCDecoderConfigurationRecord.length);
RESFlvData resFlvData = new RESFlvData();
resFlvData.droppable = false;
resFlvData.byteBuffer = finalBuff;
resFlvData.size = finalBuff.length;
resFlvData.dts = (int) tms;
resFlvData.flvTagType = RESFlvData.FLV_RTMP_PACKET_TYPE_VIDEO;
resFlvData.videoFrameType = RESFlvData.NALU_TYPE_IDR;
dataCollecter.collect(resFlvData, RESRtmpSender.FROM_VIDEO);
}
private void sendRealData(long tms, ByteBuffer realData) {
int realDataLength = realData.remaining();
int packetLen = Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH +
Packager.FLVPackager.NALU_HEADER_LENGTH +
realDataLength;
byte[] finalBuff = new byte[packetLen];
realData.get(finalBuff, Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH +
Packager.FLVPackager.NALU_HEADER_LENGTH,
realDataLength);
int frameType = finalBuff[Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH +
Packager.FLVPackager.NALU_HEADER_LENGTH] & 0x1F;
Packager.FLVPackager.fillFlvVideoTag(finalBuff,
0,
false,
frameType == 5,
realDataLength);
RESFlvData resFlvData = new RESFlvData();
resFlvData.droppable = true;
resFlvData.byteBuffer = finalBuff;
resFlvData.size = finalBuff.length;
resFlvData.dts = (int) tms;
resFlvData.flvTagType = RESFlvData.FLV_RTMP_PACKET_TYPE_VIDEO;
resFlvData.videoFrameType = frameType;
dataCollecter.collect(resFlvData, RESRtmpSender.FROM_VIDEO);
}
}
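/*
 * Usage sketch (added for illustration; not part of the original commit): how
 * core code in this package is expected to drive VideoSenderThread. The
 * encoder and collecter are assumed to exist already.
 */
class VideoSenderThreadUsageSketch {
static void run(MediaCodec encoder, RESFlvDataCollecter collecter) throws InterruptedException {
VideoSenderThread sender = new VideoSenderThread("VideoSenderThread", encoder, collecter);
sender.start(); // drains encoder output and packages it into FLV tags
// ... streaming ...
sender.quit(); // sets the quit flag and interrupts the sleep
sender.join(); // wait for the drain loop to exit
}
}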


@ -0,0 +1,27 @@
package me.lake.librestreaming.core.listener;
public interface RESConnectionListener {
void onOpenConnectionResult(int result);
void onWriteError(int errno);
void onCloseConnectionResult(int result);
class RESWriteErrorRunable implements Runnable {
RESConnectionListener connectionListener;
int errno;
public RESWriteErrorRunable(RESConnectionListener connectionListener, int errno) {
this.connectionListener = connectionListener;
this.errno = errno;
}
@Override
public void run() {
if (connectionListener != null) {
connectionListener.onWriteError(errno);
}
}
}
}
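/*
 * Usage sketch (added for illustration; not part of the original commit):
 * RESWriteErrorRunable lets a worker thread deliver a write error to the
 * listener on another thread. The android.os.Handler is assumed to be owned
 * by the caller.
 */
class RESConnectionListenerUsageSketch {
static void report(android.os.Handler handler, RESConnectionListener listener, int errno) {
// marshal the callback onto the handler's thread
handler.post(new RESConnectionListener.RESWriteErrorRunable(listener, errno));
}
}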


@ -0,0 +1,25 @@
package me.lake.librestreaming.core.listener;
import android.graphics.Bitmap;
public interface RESScreenShotListener {
void onScreenShotResult(Bitmap bitmap);
class RESScreenShotListenerRunable implements Runnable {
Bitmap resultBitmap;
RESScreenShotListener resScreenShotListener;
public RESScreenShotListenerRunable(RESScreenShotListener listener, Bitmap bitmap) {
resScreenShotListener = listener;
resultBitmap = bitmap;
}
@Override
public void run() {
if (resScreenShotListener != null) {
resScreenShotListener.onScreenShotResult(resultBitmap);
}
}
}
}


@ -0,0 +1,24 @@
package me.lake.librestreaming.core.listener;
public interface RESVideoChangeListener {
void onVideoSizeChanged(int width, int height);
class RESVideoChangeRunable implements Runnable {
RESVideoChangeListener videoChangeListener;
int w, h;
public RESVideoChangeRunable(RESVideoChangeListener videoChangeListener, int w, int h) {
this.videoChangeListener = videoChangeListener;
this.w = w;
this.h = h;
}
@Override
public void run() {
if (videoChangeListener != null) {
videoChangeListener.onVideoSizeChanged(w, h);
}
}
}
}


@ -0,0 +1,186 @@
package me.lake.librestreaming.encoder;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.util.Log;
import java.io.IOException;
import java.nio.ByteBuffer;
public class MediaAudioEncoder extends MediaEncoder {
private static final boolean DEBUG = false; // set to true for verbose logging during development
private static final String TAG = "MediaAudioEncoder";
private static final String MIME_TYPE = "audio/mp4a-latm";
private static final int SAMPLE_RATE = 44100; // 44.1[kHz] is the only sample rate guaranteed to be available on all devices.
private static final int BIT_RATE = 64000;
public static final int SAMPLES_PER_FRAME = 1024; // AAC frame size, samples/frame/channel
public static final int FRAMES_PER_BUFFER = 25; // number of AAC frames per buffer
private AudioThread mAudioThread = null;
public MediaAudioEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
super(muxer, listener);
}
@Override
protected void prepare() throws IOException {
if (DEBUG) Log.v(TAG, "prepare:");
mTrackIndex = -1;
mMuxerStarted = mIsEOS = false;
// prepare MediaCodec for AAC encoding of audio data from the internal mic.
final MediaCodecInfo audioCodecInfo = selectAudioCodec(MIME_TYPE);
if (audioCodecInfo == null) {
Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
return;
}
if (DEBUG) Log.i(TAG, "selected codec: " + audioCodecInfo.getName());
final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 2);
audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_STEREO);
audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 2);
// audioFormat.setLong(MediaFormat.KEY_MAX_INPUT_SIZE, inputFile.length());
// audioFormat.setLong(MediaFormat.KEY_DURATION, (long)durationInMs );
if (DEBUG) Log.i(TAG, "format: " + audioFormat);
mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mMediaCodec.start();
if (DEBUG) Log.i(TAG, "prepare finishing");
if (mListener != null) {
try {
mListener.onPrepared(this);
} catch (final Exception e) {
Log.e(TAG, "prepare:", e);
}
}
}
@Override
protected void startRecording() {
super.startRecording();
// create and execute audio capturing thread using internal mic
if (mAudioThread == null) {
mAudioThread = new AudioThread();
mAudioThread.start();
}
}
@Override
protected void release() {
mAudioThread = null;
super.release();
}
private static final int[] AUDIO_SOURCES = new int[] {
MediaRecorder.AudioSource.MIC,
MediaRecorder.AudioSource.DEFAULT,
MediaRecorder.AudioSource.CAMCORDER,
MediaRecorder.AudioSource.VOICE_COMMUNICATION,
MediaRecorder.AudioSource.VOICE_RECOGNITION,
};
/**
* Thread to capture audio data from internal mic as uncompressed 16bit PCM data
* and write it to the MediaCodec encoder
*/
private class AudioThread extends Thread {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
try {
final int min_buffer_size = AudioRecord.getMinBufferSize(
SAMPLE_RATE, AudioFormat.CHANNEL_IN_STEREO,
AudioFormat.ENCODING_PCM_16BIT);
int buffer_size = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;
if (buffer_size < min_buffer_size)
buffer_size = ((min_buffer_size / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;
AudioRecord audioRecord = null;
for (final int source : AUDIO_SOURCES) {
try {
audioRecord = new AudioRecord(
source, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT, buffer_size);
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED)
audioRecord = null;
} catch (final Exception e) {
audioRecord = null;
}
if (audioRecord != null) break;
}
if (audioRecord != null) {
try {
if (mIsCapturing) {
if (DEBUG) Log.v(TAG, "AudioThread:start audio recording");
final ByteBuffer buf = ByteBuffer.allocateDirect(SAMPLES_PER_FRAME);
int readBytes;
audioRecord.startRecording();
try {
for (; mIsCapturing && !mRequestStop && !mIsEOS ;) {
// read audio data from internal mic
buf.clear();
readBytes = audioRecord.read(buf, SAMPLES_PER_FRAME);
if (readBytes > 0) {
// set audio data to encoder
buf.position(readBytes);
buf.flip();
encode(buf, readBytes, getPTSUs());
frameAvailableSoon();
}
}
frameAvailableSoon();
} finally {
audioRecord.stop();
}
}
} finally {
audioRecord.release();
}
} else {
Log.e(TAG, "failed to initialize AudioRecord");
}
} catch (final Exception e) {
Log.e(TAG, "AudioThread#run", e);
}
if (DEBUG) Log.v(TAG, "AudioThread:finished");
}
}
/**
* select the first codec that matches a specific MIME type
* @param mimeType desired MIME type
* @return matching encoder info, or null if none was found
*/
private static final MediaCodecInfo selectAudioCodec(final String mimeType) {
if (DEBUG) Log.v(TAG, "selectAudioCodec:");
MediaCodecInfo result = null;
// get the list of available codecs
final int numCodecs = MediaCodecList.getCodecCount();
LOOP: for (int i = 0; i < numCodecs; i++) {
final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) { // skip decoders
continue;
}
final String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (DEBUG) Log.i(TAG, "supportedType:" + codecInfo.getName() + ",MIME=" + types[j]);
if (types[j].equalsIgnoreCase(mimeType)) {
if (result == null) {
result = codecInfo;
break LOOP;
}
}
}
}
return result;
}
}
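/*
 * Usage sketch (added for illustration; not part of the original commit):
 * wiring MediaAudioEncoder into a MediaMuxerWrapper. The encoder registers
 * itself with the muxer in its constructor.
 */
class MediaAudioEncoderUsageSketch {
static void record() throws IOException {
MediaMuxerWrapper muxer = new MediaMuxerWrapper(".mp4"); // output under Movies/WSLive
new MediaAudioEncoder(muxer, new MediaEncoder.MediaEncoderListener() {
@Override public void onPrepared(MediaEncoder encoder) { }
@Override public void onStopped(MediaEncoder encoder) { }
});
muxer.prepare(); // prepares every registered encoder
muxer.startRecording(); // spawns the AudioThread and starts encoding
// ... later: muxer.stopRecording();
}
}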


@ -0,0 +1,379 @@
package me.lake.librestreaming.encoder;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.util.Log;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
public abstract class MediaEncoder implements Runnable {
private static final boolean DEBUG = false; // set to true for verbose logging during development
private static final String TAG = "MediaEncoder";
protected static final int TIMEOUT_USEC = 10000; // 10[msec]
protected static final int MSG_FRAME_AVAILABLE = 1;
protected static final int MSG_STOP_RECORDING = 9;
public interface MediaEncoderListener {
public void onPrepared(MediaEncoder encoder);
public void onStopped(MediaEncoder encoder);
}
protected final Object mSync = new Object();
/**
* Flag that indicates this encoder is capturing now.
*/
protected volatile boolean mIsCapturing;
/**
* Count of pending drain requests (frame data will be available soon).
*/
private int mRequestDrain;
/**
* Flag to request stop capturing
*/
protected volatile boolean mRequestStop;
/**
* Flag that indicates the encoder received EOS (End Of Stream)
*/
protected boolean mIsEOS;
/**
* Flag that indicates the muxer is running
*/
protected boolean mMuxerStarted;
/**
* Track Number
*/
protected int mTrackIndex;
/**
* MediaCodec instance for encoding
*/
protected MediaCodec mMediaCodec; // API >= 16(Android4.1.2)
/**
* Weak reference to the MediaMuxerWrapper instance
*/
protected final WeakReference<MediaMuxerWrapper> mWeakMuxer;
/**
* BufferInfo instance for dequeuing
*/
private MediaCodec.BufferInfo mBufferInfo; // API >= 16(Android4.1.2)
protected final MediaEncoderListener mListener;
public MediaEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
if (listener == null) throw new NullPointerException("MediaEncoderListener is null");
if (muxer == null) throw new NullPointerException("MediaMuxerWrapper is null");
mWeakMuxer = new WeakReference<MediaMuxerWrapper>(muxer);
muxer.addEncoder(this);
mListener = listener;
synchronized (mSync) {
// create BufferInfo here for efficiency (to reduce GC)
mBufferInfo = new MediaCodec.BufferInfo();
// wait for the thread to start
new Thread(this, getClass().getSimpleName()).start();
try {
mSync.wait();
} catch (final InterruptedException e) {
}
}
}
public String getOutputPath() {
final MediaMuxerWrapper muxer = mWeakMuxer.get();
return muxer != null ? muxer.getOutputPath() : null;
}
/**
* the method to indicate frame data is soon available or already available
* @return true if the encoder is ready to encode.
*/
public boolean frameAvailableSoon() {
if (DEBUG) Log.v(TAG, "frameAvailableSoon");
synchronized (mSync) {
if (!mIsCapturing || mRequestStop) {
return false;
}
mRequestDrain++;
mSync.notifyAll();
}
return true;
}
/**
* encoding loop on private thread
*/
@Override
public void run() {
// android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
synchronized (mSync) {
mRequestStop = false;
mRequestDrain = 0;
mSync.notify();
}
boolean localRequestStop;
boolean localRequestDrain;
while (true) {
synchronized (mSync) {
localRequestStop = mRequestStop;
localRequestDrain = (mRequestDrain > 0);
if (localRequestDrain)
mRequestDrain--;
}
if (localRequestStop) {
drain();
// request stop recording
signalEndOfInputStream();
// process output data again for the EOS signal
drain();
// release all related objects
release();
break;
}
if (localRequestDrain) {
drain();
} else {
synchronized (mSync) {
try {
mSync.wait();
} catch (final InterruptedException e) {
break;
}
}
}
} // end of while
if (DEBUG) Log.d(TAG, "Encoder thread exiting");
synchronized (mSync) {
mRequestStop = true;
mIsCapturing = false;
}
}
/*
* preparing method for each subclass;
* it must be implemented by subclasses, so it is declared abstract
* @throws IOException
*/
/*package*/ abstract void prepare() throws IOException;
/*package*/ void startRecording() {
if (DEBUG) Log.v(TAG, "startRecording");
synchronized (mSync) {
mIsCapturing = true;
mRequestStop = false;
mSync.notifyAll();
}
}
/**
* the method to request stop encoding
*/
/*package*/ void stopRecording() {
if (DEBUG) Log.v(TAG, "stopRecording");
synchronized (mSync) {
if (!mIsCapturing || mRequestStop) {
return;
}
mRequestStop = true; // for rejecting newer frame
mSync.notifyAll();
// We cannot know when the encoding and writing finish,
// so we return immediately after the request to avoid delaying the caller thread
}
}
//********************************************************************************
//********************************************************************************
/**
* Release all related objects
*/
protected void release() {
if (DEBUG) Log.d(TAG, "release:");
try {
mListener.onStopped(this);
} catch (final Exception e) {
Log.e(TAG, "failed onStopped", e);
}
mIsCapturing = false;
if (mMediaCodec != null) {
try {
mMediaCodec.stop();
mMediaCodec.release();
mMediaCodec = null;
} catch (final Exception e) {
Log.e(TAG, "failed releasing MediaCodec", e);
}
}
if (mMuxerStarted) {
final MediaMuxerWrapper muxer = mWeakMuxer != null ? mWeakMuxer.get() : null;
if (muxer != null) {
try {
muxer.stop();
} catch (final Exception e) {
Log.e(TAG, "failed stopping muxer", e);
}
}
}
mBufferInfo = null;
}
protected void signalEndOfInputStream() {
if (DEBUG) Log.d(TAG, "sending EOS to encoder");
// signalEndOfInputStream is only available for video encoding with a surface
// and is equivalent to sending an empty buffer with the BUFFER_FLAG_END_OF_STREAM flag.
// mMediaCodec.signalEndOfInputStream(); // API >= 18
encode(null, 0, getPTSUs());
}
/**
* Method to set byte array to the MediaCodec encoder
* @param buffer input data, or null when signalling EOS
* @param length length of byte array, zero means EOS.
* @param presentationTimeUs presentation timestamp in microseconds
*/
protected void encode(final ByteBuffer buffer, final int length, final long presentationTimeUs) {
if (!mIsCapturing) return;
final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
while (mIsCapturing) {
final int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
Log.e(TAG, "inputBufferIndex: "+inputBufferIndex );
if (inputBufferIndex >= 0) {
final ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
if (buffer != null) {
inputBuffer.put(buffer);
}
if (DEBUG) Log.v(TAG, "encode:queueInputBuffer");
if (length <= 0) {
// send EOS
mIsEOS = true;
if (DEBUG) Log.i(TAG, "send BUFFER_FLAG_END_OF_STREAM");
mMediaCodec.queueInputBuffer(inputBufferIndex, 0, 0,
presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
break;
} else {
mMediaCodec.queueInputBuffer(inputBufferIndex, 0, length,
presentationTimeUs, 0);
}
break;
} else if (inputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
// wait until the MediaCodec encoder is ready to encode
// nothing to do here because MediaCodec#dequeueInputBuffer(TIMEOUT_USEC)
// will wait for maximum TIMEOUT_USEC(10msec) on each call
}
}
}
/**
* drain encoded data and write them to muxer
*/
protected void drain() {
if (mMediaCodec == null) return;
ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers();
Log.e(TAG, "encoderOutputBuffers: "+encoderOutputBuffers.length );
int encoderStatus, count = 0;
final MediaMuxerWrapper muxer = mWeakMuxer.get();
if (muxer == null) {
// throw new NullPointerException("muxer is unexpectedly null");
Log.w(TAG, "muxer is unexpectedly null");
return;
}
Log.e(TAG, "mIsCapturing: "+mIsCapturing );
LOOP: while (mIsCapturing) {
// get encoded data with maximum timeout duration of TIMEOUT_USEC(=10[msec])
encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
Log.e(TAG, "encoderStatus: "+encoderStatus );
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// wait 5 counts(=TIMEOUT_USEC x 5 = 50msec) until data/EOS come
if (!mIsEOS) {
if (++count > 5)
break LOOP; // out of while
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
if (DEBUG) Log.v(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
// this should not occur when encoding
encoderOutputBuffers = mMediaCodec.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
if (DEBUG) Log.v(TAG, "INFO_OUTPUT_FORMAT_CHANGED");
// this status indicates that the codec's output format has changed;
// it should come only once, before the actual encoded data,
// but it never comes on Android 4.3 or less,
// and in that case you should handle it when MediaCodec.BUFFER_FLAG_CODEC_CONFIG arrives.
if (mMuxerStarted) { // a second format change is an error
throw new RuntimeException("format changed twice");
}
// get output format from codec and pass it to the muxer
// getOutputFormat should be called only after INFO_OUTPUT_FORMAT_CHANGED, otherwise it may crash.
final MediaFormat format = mMediaCodec.getOutputFormat(); // API >= 16
mTrackIndex = muxer.addTrack(format);
mMuxerStarted = true;
if (!muxer.start()) {
// we should wait until muxer is ready
synchronized (muxer) {
while (!muxer.isStarted())
try {
muxer.wait(100);
} catch (final InterruptedException e) {
break LOOP;
}
}
}
} else if (encoderStatus < 0) {
// unexpected status
if (DEBUG) Log.w(TAG, "drain:unexpected result from encoder#dequeueOutputBuffer: " + encoderStatus);
} else {
final ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
// this should never happen; may be a MediaCodec internal error
throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// You should set the output format on the muxer here when targeting Android 4.3 or less,
// but MediaCodec#getOutputFormat cannot be called here (INFO_OUTPUT_FORMAT_CHANGED hasn't come yet),
// therefore you would have to parse and prepare the output format from the buffer data yourself.
// This sample is for API >= 18 (>= Android 4.3), so just ignore this flag here
if (DEBUG) Log.d(TAG, "drain:BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
// encoded data is ready, clear waiting counter
count = 0;
if (!mMuxerStarted) {
// muxer is not ready; this is a programming failure.
throw new RuntimeException("drain:muxer hasn't started");
}
// write encoded data to muxer (presentationTimeUs needs adjusting)
mBufferInfo.presentationTimeUs = getPTSUs();
muxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
prevOutputPTSUs = mBufferInfo.presentationTimeUs;
}
// return buffer to encoder
mMediaCodec.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
// when EOS come.
mIsCapturing = false;
break; // out of while
}
}
}
}
/**
* previous presentationTimeUs for writing
*/
private long prevOutputPTSUs = 0;
/**
* get next encoding presentationTimeUs
* @return
*/
protected long getPTSUs() {
long result = System.nanoTime() / 1000L;
// presentationTimeUs should be monotonic,
// otherwise the muxer fails to write
if (result < prevOutputPTSUs)
result = prevOutputPTSUs; // clamp so the timestamp never goes backwards
return result;
}
}
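/*
 * Lifecycle sketch (added for illustration; not part of the original commit):
 * the protocol between the muxer and a MediaEncoder. Note that
 * MediaMuxerWrapper.addEncoder() only accepts the built-in MediaVideoEncoder
 * and MediaAudioEncoder, so arbitrary subclasses cannot register with it.
 */
class MediaEncoderLifecycleSketch {
static void drive(MediaEncoder encoder) throws IOException {
encoder.prepare(); // configure and start the MediaCodec
encoder.startRecording(); // wake the drain thread
encoder.frameAvailableSoon(); // ask the drain loop to pull encoder output
encoder.stopRecording(); // request EOS; the drain thread releases everything
}
}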


@ -0,0 +1,232 @@
package me.lake.librestreaming.encoder;
import android.content.Context;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Environment;
import android.text.TextUtils;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.GregorianCalendar;
import java.util.Locale;
public class MediaMuxerWrapper {
private static final boolean DEBUG = false; // set to true for verbose logging during development
private static final String TAG = "MediaMuxerWrapper";
private static final String DIR_NAME = "WSLive";
private static final SimpleDateFormat mDateTimeFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss", Locale.US);
private String mOutputPath;
private final MediaMuxer mMediaMuxer; // API >= 18
private int mEncoderCount, mStatredCount;
private boolean mIsStarted;
private MediaEncoder mVideoEncoder, mAudioEncoder;
/**
* Constructor
* @param ext extension of output file
* @throws IOException
*/
public MediaMuxerWrapper(String ext) throws IOException {
if (TextUtils.isEmpty(ext)) ext = ".mp4";
try {
mOutputPath = getCaptureFile(Environment.DIRECTORY_MOVIES, ext).toString();
//mOutputPath =newTmpDir("Movies");/* getCaptureFile(Environment.DIRECTORY_MOVIES, ext).toString();*/
} catch (final NullPointerException e) {
throw new RuntimeException("This app has no permission of writing external storage");
}
mMediaMuxer = new MediaMuxer(mOutputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
mEncoderCount = mStatredCount = 0;
mIsStarted = false;
}
public static final String ROOT_DIR = "video";
private static final String DIR_TMP = "tmp";
private static Context mContext;
public static void setContext(Context context){
mContext = context;
}
/**
* Create a tmp directory (tmp/xxx/) and build an output file path inside it
*
* @param dirName subdirectory name under tmp/
* @return absolute path of the generated .mp4 file
*/
public static String newTmpDir(String dirName) {
File tmpDir = new File(getStorageRoot(mContext, ROOT_DIR, true), DIR_TMP);
if (!tmpDir.exists() || !tmpDir.isDirectory()) {
tmpDir.mkdirs();
}
File dir = new File(tmpDir, dirName);
if (!dir.exists() || !dir.isDirectory()) {
dir.mkdirs();
}
return new File(dir, getDateTimeString() + ".mp4").getAbsolutePath();
}
/**
* Get the cache root directory
*
* @param context application context
* @param dirName directory name under the cache root
* @param isExternFirst whether external storage is preferred
* @return the cache root directory
*/
public static File getStorageRoot(Context context, String dirName, boolean isExternFirst) {
File cacheDir = null;
if ((Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState())
|| !Environment.isExternalStorageRemovable()) && isExternFirst) {
cacheDir = context.getExternalCacheDir();
} else {
cacheDir = context.getCacheDir();
}
File dir = new File(cacheDir, dirName);
if (!dir.exists() || !dir.isDirectory()) {
dir.mkdirs();
}
return dir;
}
public String getOutputPath() {
return mOutputPath;
}
public void prepare() throws IOException {
if (mVideoEncoder != null)
mVideoEncoder.prepare();
if (mAudioEncoder != null)
mAudioEncoder.prepare();
}
public void startRecording() {
if (mVideoEncoder != null)
mVideoEncoder.startRecording();
if (mAudioEncoder != null)
mAudioEncoder.startRecording();
}
public void stopRecording() {
if (mVideoEncoder != null)
mVideoEncoder.stopRecording();
mVideoEncoder = null;
if (mAudioEncoder != null)
mAudioEncoder.stopRecording();
mAudioEncoder = null;
}
public synchronized boolean isStarted() {
return mIsStarted;
}
//**********************************************************************
//**********************************************************************
/**
* assign encoder to this class. this is called from the encoder.
* @param encoder instance of MediaVideoEncoder or MediaAudioEncoder
*/
/*package*/ void addEncoder(final MediaEncoder encoder) {
if (encoder instanceof MediaVideoEncoder) {
if (mVideoEncoder != null)
throw new IllegalArgumentException("Video encoder already added.");
mVideoEncoder = encoder;
} else if (encoder instanceof MediaAudioEncoder) {
if (mAudioEncoder != null)
throw new IllegalArgumentException("Video encoder already added.");
mAudioEncoder = encoder;
} else
throw new IllegalArgumentException("unsupported encoder");
mEncoderCount = (mVideoEncoder != null ? 1 : 0) + (mAudioEncoder != null ? 1 : 0);
}
/**
* request start recording from encoder
* @return true when muxer is ready to write
*/
/*package*/ synchronized boolean start() {
if (DEBUG) Log.v(TAG, "start:");
mStatredCount++;
if ((mEncoderCount > 0) && (mStatredCount == mEncoderCount)) {
mMediaMuxer.start();
mIsStarted = true;
notifyAll();
if (DEBUG) Log.v(TAG, "MediaMuxer started:");
}
return mIsStarted;
}
/**
* request stop recording from the encoder when the encoder received EOS
*/
/*package*/ synchronized void stop() {
if (DEBUG) Log.v(TAG, "stop:mStatredCount=" + mStatredCount);
mStatredCount--;
if ((mEncoderCount > 0) && (mStatredCount <= 0)) {
mMediaMuxer.stop();
mMediaMuxer.release();
mIsStarted = false;
if (DEBUG) Log.v(TAG, "MediaMuxer stopped:");
}
}
/**
* assign a track of this format to the muxer
* @param format track format
* @return track index; a negative value indicates an error
*/
/*package*/ synchronized int addTrack(final MediaFormat format) {
if (mIsStarted)
throw new IllegalStateException("muxer already started");
final int trackIx = mMediaMuxer.addTrack(format);
if (DEBUG) Log.i(TAG, "addTrack:trackNum=" + mEncoderCount + ",trackIx=" + trackIx + ",format=" + format);
return trackIx;
}
/**
* write encoded data to muxer
* @param trackIndex track index returned by addTrack
* @param byteBuf encoded sample data
* @param bufferInfo buffer metadata from the codec
*/
/*package*/ synchronized void writeSampleData(final int trackIndex, final ByteBuffer byteBuf, final MediaCodec.BufferInfo bufferInfo) {
if (mStatredCount > 0)
mMediaMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
}
//**********************************************************************
//**********************************************************************
/**
* generate output file
* @param type Environment.DIRECTORY_MOVIES / Environment.DIRECTORY_DCIM etc.
* @param ext .mp4(.m4a for audio) or .png
* @return null when this app has no permission to write to external storage.
*/
public static final File getCaptureFile(final String type, final String ext) {
final File dir = new File(Environment.getExternalStoragePublicDirectory(type), DIR_NAME);
Log.d(TAG, "path=" + dir.toString());
dir.mkdirs();
if (dir.canWrite()) {
return new File(dir, getDateTimeString() + ext);
}
return null;
}
/**
* get current date and time as String
* @return
*/
private static final String getDateTimeString() {
final GregorianCalendar now = new GregorianCalendar();
return mDateTimeFormat.format(now.getTime());
}
public String getFilePath(){
return mOutputPath;
}
}
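/*
 * Usage sketch (added for illustration; not part of the original commit): a
 * muxer with both encoders attached. Writing starts only after every
 * registered encoder has delivered its output format (see start()/addTrack()).
 */
class MediaMuxerWrapperUsageSketch {
static MediaMuxerWrapper begin(MediaEncoder.MediaEncoderListener listener, int width, int height) throws IOException {
MediaMuxerWrapper muxer = new MediaMuxerWrapper(".mp4");
new MediaVideoEncoder(muxer, listener, width, height); // registers itself with the muxer
new MediaAudioEncoder(muxer, listener);
muxer.prepare();
muxer.startRecording();
return muxer; // caller invokes stopRecording() when done
}
}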


@ -0,0 +1,251 @@
package me.lake.librestreaming.encoder;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.opengl.EGLContext;
import android.opengl.Matrix;
import android.util.Log;
import android.view.Surface;
import java.io.IOException;
import me.lake.librestreaming.encoder.utils.RenderHandler;
public class MediaVideoEncoder extends MediaEncoder {
private static final boolean DEBUG = false; // set to true for verbose logging during development
private static final String TAG = "MediaVideoEncoder";
private static final String MIME_TYPE = "video/avc";
// parameters for recording
private static final int FRAME_RATE = 24;
private static final float BPP = 0.25f;
private final int mWidth;
private final int mHeight;
private RenderHandler mRenderHandler;
private Surface mSurface;
private int previewW, previewH; // preview width/height
private float[] mvpMatrix = new float[]{
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
};
private boolean isMatrixCalc = false;
public MediaVideoEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener, final int width, final int height) {
super(muxer, listener);
if (DEBUG) Log.i(TAG, "MediaVideoEncoder: ");
mWidth = width;
mHeight = height;
mRenderHandler = RenderHandler.createHandler(TAG);
}
public boolean frameAvailableSoon(final float[] tex_matrix) {
final boolean result = super.frameAvailableSoon();
if (result)
mRenderHandler.draw(tex_matrix);
return result;
}
public boolean frameAvailableSoon(final float[] tex_matrix, final float[] mvp_matrix) {
final boolean result = super.frameAvailableSoon();
if (result)
mRenderHandler.draw(tex_matrix, mvp_matrix);
return result;
}
@Override
public boolean frameAvailableSoon() {
final boolean result = super.frameAvailableSoon();
if (result)
mRenderHandler.draw(null);
return result;
}
@Override
protected void prepare() throws IOException {
if (DEBUG) Log.i(TAG, "prepare: ");
mTrackIndex = -1;
mMuxerStarted = mIsEOS = false;
final MediaCodecInfo videoCodecInfo = selectVideoCodec(MIME_TYPE);
if (videoCodecInfo == null) {
Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
return;
}
if (DEBUG) Log.i(TAG, "selected codec: " + videoCodecInfo.getName());
final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); // API >= 18
format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 6);
if (DEBUG) Log.i(TAG, "format: " + format);
mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
// get Surface for encoder input
// this method only can call between #configure and #start
mSurface = mMediaCodec.createInputSurface(); // API >= 18
mMediaCodec.start();
if (DEBUG) Log.i(TAG, "prepare finishing");
if (mListener != null) {
try {
mListener.onPrepared(this);
} catch (final Exception e) {
Log.e(TAG, "prepare:", e);
}
}
}
public void setEglContext(final EGLContext shared_context, final int tex_id) {
mRenderHandler.setEglContext(shared_context, tex_id, mSurface, true);
}
@Override
protected void release() {
if (DEBUG) Log.i(TAG, "release:");
if (mSurface != null) {
mSurface.release();
mSurface = null;
}
if (mRenderHandler != null) {
mRenderHandler.release();
mRenderHandler = null;
}
super.release();
}
private int calcBitRate() {
final int bitrate = (int) (BPP * FRAME_RATE * mWidth * mHeight);
Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f));
return bitrate;
}
/**
* select the first codec that matches a specific MIME type
*
* @param mimeType desired MIME type
* @return null if no codec matched
*/
protected static final MediaCodecInfo selectVideoCodec(final String mimeType) {
if (DEBUG) Log.v(TAG, "selectVideoCodec:");
// get the list of available codecs
final int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) { // skip decoders
continue;
}
// select first codec that match a specific MIME type and color format
final String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
if (DEBUG) Log.i(TAG, "codec:" + codecInfo.getName() + ",MIME=" + types[j]);
final int format = selectColorFormat(codecInfo, mimeType);
if (format > 0) {
return codecInfo;
}
}
}
}
return null;
}
/**
* select a color format that is available on the specific codec and that we can use.
*
* @return 0 if no colorFormat is matched
*/
protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
if (DEBUG) Log.i(TAG, "selectColorFormat: ");
int result = 0;
final MediaCodecInfo.CodecCapabilities caps;
try {
Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
caps = codecInfo.getCapabilitiesForType(mimeType);
} finally {
Thread.currentThread().setPriority(Thread.NORM_PRIORITY);
}
int colorFormat;
for (int i = 0; i < caps.colorFormats.length; i++) {
colorFormat = caps.colorFormats[i];
if (isRecognizedViewoFormat(colorFormat)) {
if (result == 0)
result = colorFormat;
break;
}
}
if (result == 0)
Log.e(TAG, "couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
return result;
}
/**
* color formats that we can use in this class
*/
protected static int[] recognizedFormats;
static {
recognizedFormats = new int[]{
// MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
// MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
// MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface,
};
}
private static final boolean isRecognizedViewoFormat(final int colorFormat) {
if (DEBUG) Log.i(TAG, "isRecognizedViewoFormat:colorFormat=" + colorFormat);
final int n = recognizedFormats != null ? recognizedFormats.length : 0;
for (int i = 0; i < n; i++) {
if (recognizedFormats[i] == colorFormat) {
return true;
}
}
return false;
}
@Override
protected void signalEndOfInputStream() {
if (DEBUG) Log.d(TAG, "sending EOS to encoder");
mMediaCodec.signalEndOfInputStream(); // API >= 18
mIsEOS = true;
}
public void setPreviewWH(int previewW, int previewH) {
this.previewW = previewW;
this.previewH = previewH;
}
public float[] getMvpMatrix() {
if (previewW < 1 || previewH < 1) return null;
if (isMatrixCalc) return mvpMatrix;
float encodeWHRatio = mWidth * 1.0f / mHeight;
float previewWHRatio = previewW * 1.0f / previewH;
float[] projection = new float[16];
float[] camera = new float[16];
if (encodeWHRatio > previewWHRatio) {
Matrix.orthoM(projection, 0, -1, 1, -previewWHRatio / encodeWHRatio, previewWHRatio / encodeWHRatio, 1, 3);
} else {
Matrix.orthoM(projection, 0, -encodeWHRatio / previewWHRatio, encodeWHRatio / previewWHRatio, -1, 1, 1, 3);
}
Matrix.setLookAtM(camera, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0);
Matrix.multiplyMM(mvpMatrix, 0, projection, 0, camera, 0);
isMatrixCalc = true;
return mvpMatrix;
}
}
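/*
 * Usage sketch (added for illustration; not part of the original commit): how
 * a GL render loop is expected to feed this encoder. eglContext and texId come
 * from the caller's GL thread, and setPreviewWH() is assumed to have been
 * called so that getMvpMatrix() is valid.
 */
class MediaVideoEncoderUsageSketch {
static void bindAndDraw(MediaVideoEncoder encoder, EGLContext eglContext, int texId, float[] texMatrix) {
// one-time: bind the encoder's input surface to the shared GL context
encoder.setEglContext(eglContext, texId);
// per frame: request a drain and draw the texture into the codec surface
encoder.frameAvailableSoon(texMatrix, encoder.getMvpMatrix());
}
}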


@ -0,0 +1,324 @@
package me.lake.librestreaming.encoder.utils;
import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.os.Build;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public class EGLBase { // API >= 17
private static final boolean DEBUG = false; // set to true for verbose logging during development
private static final String TAG = "EGLBase";
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
private EGLConfig mEglConfig = null;
private EGLContext mEglContext = EGL14.EGL_NO_CONTEXT;
private EGLDisplay mEglDisplay = EGL14.EGL_NO_DISPLAY;
private EGLContext mDefaultContext = EGL14.EGL_NO_CONTEXT;
public static class EglSurface {
private final EGLBase mEgl;
private EGLSurface mEglSurface = EGL14.EGL_NO_SURFACE;
private final int mWidth, mHeight;
EglSurface(final EGLBase egl, final Object surface) {
if (DEBUG) Log.v(TAG, "EglSurface:");
if (!(surface instanceof SurfaceView)
&& !(surface instanceof Surface)
&& !(surface instanceof SurfaceHolder)
&& !(surface instanceof SurfaceTexture))
throw new IllegalArgumentException("unsupported surface");
mEgl = egl;
mEglSurface = mEgl.createWindowSurface(surface);
mWidth = mEgl.querySurface(mEglSurface, EGL14.EGL_WIDTH);
mHeight = mEgl.querySurface(mEglSurface, EGL14.EGL_HEIGHT);
if (DEBUG) Log.v(TAG, String.format("EglSurface:size(%d,%d)", mWidth, mHeight));
}
EglSurface(final EGLBase egl, final int width, final int height) {
if (DEBUG) Log.v(TAG, "EglSurface:");
mEgl = egl;
mEglSurface = mEgl.createOffscreenSurface(width, height);
mWidth = width;
mHeight = height;
}
public void makeCurrent() {
mEgl.makeCurrent(mEglSurface);
}
public void swap() {
mEgl.swap(mEglSurface);
}
public EGLContext getContext() {
return mEgl.getContext();
}
public void release() {
if (DEBUG) Log.v(TAG, "EglSurface:release:");
mEgl.makeDefault();
mEgl.destroyWindowSurface(mEglSurface);
mEglSurface = EGL14.EGL_NO_SURFACE;
}
public int getWidth() {
return mWidth;
}
public int getHeight() {
return mHeight;
}
}
public EGLBase(final EGLContext shared_context, final boolean with_depth_buffer, final boolean isRecordable) {
if (DEBUG) Log.v(TAG, "EGLBase:");
init(shared_context, with_depth_buffer, isRecordable);
}
public void release() {
if (DEBUG) Log.v(TAG, "release:");
if (mEglDisplay != EGL14.EGL_NO_DISPLAY) {
destroyContext();
EGL14.eglTerminate(mEglDisplay);
EGL14.eglReleaseThread();
}
mEglDisplay = EGL14.EGL_NO_DISPLAY;
mEglContext = EGL14.EGL_NO_CONTEXT;
}
public EglSurface createFromSurface(final Object surface) {
if (DEBUG) Log.v(TAG, "createFromSurface:");
final EglSurface eglSurface = new EglSurface(this, surface);
eglSurface.makeCurrent();
return eglSurface;
}
public EglSurface createOffscreen(final int width, final int height) {
if (DEBUG) Log.v(TAG, "createOffscreen:");
final EglSurface eglSurface = new EglSurface(this, width, height);
eglSurface.makeCurrent();
return eglSurface;
}
public EGLContext getContext() {
return mEglContext;
}
public int querySurface(final EGLSurface eglSurface, final int what) {
final int[] value = new int[1];
EGL14.eglQuerySurface(mEglDisplay, eglSurface, what, value, 0);
return value[0];
}
private void init(EGLContext shared_context, final boolean with_depth_buffer, final boolean isRecordable) {
if (DEBUG) Log.v(TAG, "init:");
if (mEglDisplay != EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("EGL already set up");
}
mEglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (mEglDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("eglGetDisplay failed");
}
final int[] version = new int[2];
if (!EGL14.eglInitialize(mEglDisplay, version, 0, version, 1)) {
mEglDisplay = null;
throw new RuntimeException("eglInitialize failed");
}
shared_context = shared_context != null ? shared_context : EGL14.EGL_NO_CONTEXT;
if (mEglContext == EGL14.EGL_NO_CONTEXT) {
mEglConfig = getConfig(with_depth_buffer, isRecordable);
if (mEglConfig == null) {
throw new RuntimeException("chooseConfig failed");
}
// create EGL rendering context
mEglContext = createContext(shared_context);
}
// confirm whether the EGL rendering context is successfully created
final int[] values = new int[1];
EGL14.eglQueryContext(mEglDisplay, mEglContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, values, 0);
if (DEBUG) Log.d(TAG, "EGLContext created, client version " + values[0]);
makeDefault(); // makeCurrent(EGL14.EGL_NO_SURFACE);
}
/**
* change context to draw this window surface
* @return true on success
*/
private boolean makeCurrent(final EGLSurface surface) {
// if (DEBUG) Log.v(TAG, "makeCurrent:");
if (mEglDisplay == null) {
if (DEBUG) Log.d(TAG, "makeCurrent:eglDisplay not initialized");
}
if (surface == null || surface == EGL14.EGL_NO_SURFACE) {
final int error = EGL14.eglGetError();
if (error == EGL14.EGL_BAD_NATIVE_WINDOW) {
Log.e(TAG, "makeCurrent:returned EGL_BAD_NATIVE_WINDOW.");
}
return false;
}
// attach EGL rendering context to a specific EGL window surface
if (!EGL14.eglMakeCurrent(mEglDisplay, surface, surface, mEglContext)) {
Log.w(TAG, "eglMakeCurrent:" + EGL14.eglGetError());
return false;
}
return true;
}
private void makeDefault() {
if (DEBUG) Log.v(TAG, "makeDefault:");
if (!EGL14.eglMakeCurrent(mEglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
Log.w("TAG", "makeDefault" + EGL14.eglGetError());
}
}
private int swap(final EGLSurface surface) {
// if (DEBUG) Log.v(TAG, "swap:");
if (!EGL14.eglSwapBuffers(mEglDisplay, surface)) {
final int err = EGL14.eglGetError();
if (DEBUG) Log.w(TAG, "swap:err=" + err);
return err;
}
return EGL14.EGL_SUCCESS;
}
private EGLContext createContext(final EGLContext shared_context) {
// if (DEBUG) Log.v(TAG, "createContext:");
final int[] attrib_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
final EGLContext context = EGL14.eglCreateContext(mEglDisplay, mEglConfig, shared_context, attrib_list, 0);
checkEglError("eglCreateContext");
return context;
}
private void destroyContext() {
if (DEBUG) Log.v(TAG, "destroyContext:");
if (!EGL14.eglDestroyContext(mEglDisplay, mEglContext)) {
Log.e("destroyContext", "display:" + mEglDisplay + " context: " + mEglContext);
Log.e(TAG, "eglDestroyContex:" + EGL14.eglGetError());
}
mEglContext = EGL14.EGL_NO_CONTEXT;
if (mDefaultContext != EGL14.EGL_NO_CONTEXT) {
if (!EGL14.eglDestroyContext(mEglDisplay, mDefaultContext)) {
Log.e("destroyContext", "display:" + mEglDisplay + " context: " + mDefaultContext);
Log.e(TAG, "eglDestroyContex:" + EGL14.eglGetError());
}
mDefaultContext = EGL14.EGL_NO_CONTEXT;
}
}
private EGLSurface createWindowSurface(final Object nativeWindow) {
if (DEBUG) Log.v(TAG, "createWindowSurface:nativeWindow=" + nativeWindow);
final int[] surfaceAttribs = {
EGL14.EGL_NONE
};
EGLSurface result = null;
try {
result = EGL14.eglCreateWindowSurface(mEglDisplay, mEglConfig, nativeWindow, surfaceAttribs, 0);
} catch (final IllegalArgumentException e) {
Log.e(TAG, "eglCreateWindowSurface", e);
}
return result;
}
/**
* Creates an EGL surface associated with an offscreen buffer.
*/
private EGLSurface createOffscreenSurface(final int width, final int height) {
if (DEBUG) Log.v(TAG, "createOffscreenSurface:");
final int[] surfaceAttribs = {
EGL14.EGL_WIDTH, width,
EGL14.EGL_HEIGHT, height,
EGL14.EGL_NONE
};
EGLSurface result = null;
try {
result = EGL14.eglCreatePbufferSurface(mEglDisplay, mEglConfig, surfaceAttribs, 0);
checkEglError("eglCreatePbufferSurface");
if (result == null) {
throw new RuntimeException("surface was null");
}
} catch (final IllegalArgumentException e) {
Log.e(TAG, "createOffscreenSurface", e);
} catch (final RuntimeException e) {
Log.e(TAG, "createOffscreenSurface", e);
}
return result;
}
private void destroyWindowSurface(EGLSurface surface) {
if (DEBUG) Log.v(TAG, "destroySurface:");
if (surface != EGL14.EGL_NO_SURFACE) {
EGL14.eglMakeCurrent(mEglDisplay,
EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
EGL14.eglDestroySurface(mEglDisplay, surface);
}
surface = EGL14.EGL_NO_SURFACE;
if (DEBUG) Log.v(TAG, "destroySurface:finished");
}
private void checkEglError(final String msg) {
int error;
if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
}
}
@SuppressWarnings("unused")
private EGLConfig getConfig(final boolean with_depth_buffer, final boolean isRecordable) {
final int[] attribList = {
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_ALPHA_SIZE, 8,
EGL14.EGL_NONE, EGL14.EGL_NONE, //EGL14.EGL_STENCIL_SIZE, 8,
EGL14.EGL_NONE, EGL14.EGL_NONE, //EGL_RECORDABLE_ANDROID, 1, // this flag is needed for recording via MediaCodec
EGL14.EGL_NONE, EGL14.EGL_NONE, // with_depth_buffer ? EGL14.EGL_DEPTH_SIZE : EGL14.EGL_NONE,
// with_depth_buffer ? 16 : 0,
EGL14.EGL_NONE
};
int offset = 10;
if (false) { // stencil buffer (always unused)
attribList[offset++] = EGL14.EGL_STENCIL_SIZE;
attribList[offset++] = 8;
}
if (with_depth_buffer) { // depth buffer
attribList[offset++] = EGL14.EGL_DEPTH_SIZE;
attribList[offset++] = 16;
}
if (isRecordable && (Build.VERSION.SDK_INT >= 18)) { // for a MediaCodec input Surface
attribList[offset++] = EGL_RECORDABLE_ANDROID;
attribList[offset++] = 1;
}
for (int i = attribList.length - 1; i >= offset; i--) {
attribList[i] = EGL14.EGL_NONE;
}
final EGLConfig[] configs = new EGLConfig[1];
final int[] numConfigs = new int[1];
if (!EGL14.eglChooseConfig(mEglDisplay, attribList, 0, configs, 0, configs.length, numConfigs, 0)) {
// XXX it would be better to fall back to RGB565
Log.w(TAG, "unable to find RGBA8888 EGLConfig");
return null;
}
return configs[0];
}
}
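/*
 * Usage sketch (added for illustration; not part of the original commit):
 * rendering into a window surface through EGLBase. sharedContext may be null;
 * surface is a Surface/SurfaceTexture/SurfaceHolder/SurfaceView.
 */
class EGLBaseUsageSketch {
static void renderOnce(EGLContext sharedContext, Object surface) {
EGLBase egl = new EGLBase(sharedContext, false, true); // recordable, no depth buffer
EGLBase.EglSurface eglSurface = egl.createFromSurface(surface); // also makes it current
// ... issue GL draw calls here ...
eglSurface.swap(); // post the back buffer to the surface
eglSurface.release();
egl.release();
}
}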


@ -0,0 +1,189 @@
package me.lake.librestreaming.encoder.utils;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Log;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
/**
* Helper class to draw to the whole view using a specific texture and texture matrix
*/
public class GLDrawer2D {
private static final boolean DEBUG = false; // set to true for verbose logging during development
private static final String TAG = "GLDrawer2D";
private static final String vss
= "uniform mat4 uMVPMatrix;\n"
+ "uniform mat4 uTexMatrix;\n"
+ "attribute highp vec4 aPosition;\n"
+ "attribute highp vec4 aTextureCoord;\n"
+ "varying highp vec2 vTextureCoord;\n"
+ "\n"
+ "void main() {\n"
+ " gl_Position = uMVPMatrix * aPosition;\n"
+ " vTextureCoord = (uTexMatrix * aTextureCoord).xy;\n"
+ "}\n";
private static final String fss
= "precision mediump float;\n"
+ "uniform sampler2D sTexture;\n"
+ "varying highp vec2 vTextureCoord;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
+ "}";
private static final float[] VERTICES = { 1.0f, 1.0f, -1.0f, 1.0f, 1.0f, -1.0f, -1.0f, -1.0f };
//private static final float[] TEXCOORD = { 1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 0.0f };
private static final float[] TEXCOORD = { 1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f };
private final FloatBuffer pVertex;
private final FloatBuffer pTexCoord;
private int hProgram;
int maPositionLoc;
int maTextureCoordLoc;
int muMVPMatrixLoc;
int muTexMatrixLoc;
private final float[] mMvpMatrix = new float[16];
private static final int FLOAT_SZ = Float.SIZE / 8;
private static final int VERTEX_NUM = 4;
private static final int VERTEX_SZ = VERTEX_NUM * 2;
/**
* Constructor
* this should be called in a GL context
*/
public GLDrawer2D() {
pVertex = ByteBuffer.allocateDirect(VERTEX_SZ * FLOAT_SZ)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
pVertex.put(VERTICES);
pVertex.flip();
pTexCoord = ByteBuffer.allocateDirect(VERTEX_SZ * FLOAT_SZ)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
pTexCoord.put(TEXCOORD);
pTexCoord.flip();
hProgram = loadShader(vss, fss);
GLES20.glUseProgram(hProgram);
maPositionLoc = GLES20.glGetAttribLocation(hProgram, "aPosition");
maTextureCoordLoc = GLES20.glGetAttribLocation(hProgram, "aTextureCoord");
muMVPMatrixLoc = GLES20.glGetUniformLocation(hProgram, "uMVPMatrix");
muTexMatrixLoc = GLES20.glGetUniformLocation(hProgram, "uTexMatrix");
Matrix.setIdentityM(mMvpMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mMvpMatrix, 0);
GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, mMvpMatrix, 0);
GLES20.glVertexAttribPointer(maPositionLoc, 2, GLES20.GL_FLOAT, false, VERTEX_SZ, pVertex);
GLES20.glVertexAttribPointer(maTextureCoordLoc, 2, GLES20.GL_FLOAT, false, VERTEX_SZ, pTexCoord);
GLES20.glEnableVertexAttribArray(maPositionLoc);
GLES20.glEnableVertexAttribArray(maTextureCoordLoc);
}
/**
* terminating; this should be called in a GL context
*/
public void release() {
if (hProgram >= 0)
GLES20.glDeleteProgram(hProgram);
hProgram = -1;
}
/**
* draw specific texture with specific texture matrix
* @param tex_id texture ID
* @param tex_matrix texture matrix; if this is null, the last one is used (the size of this array is not checked; it needs at least 16 floats)
*/
public void draw(final int tex_id, final float[] tex_matrix) {
GLES20.glUseProgram(hProgram);
if (tex_matrix != null)
GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, tex_matrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mMvpMatrix, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex_id);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, VERTEX_NUM);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glUseProgram(0);
}
/**
* Set model/view/projection transform matrix
* @param matrix
* @param offset
*/
public void setMatrix(final float[] matrix, final int offset) {
if ((matrix != null) && (matrix.length >= offset + 16)) {
System.arraycopy(matrix, offset, mMvpMatrix, 0, 16);
} else {
Matrix.setIdentityM(mMvpMatrix, 0);
}
}
/**
* create external texture
* @return texture ID
*/
public static int initTex() {
if (DEBUG) Log.v(TAG, "initTex:");
final int[] tex = new int[1];
GLES20.glGenTextures(1, tex, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
return tex[0];
}
/**
* delete specific texture
*/
public static void deleteTex(final int hTex) {
if (DEBUG) Log.v(TAG, "deleteTex:");
final int[] tex = new int[] {hTex};
GLES20.glDeleteTextures(1, tex, 0);
}
/**
* load, compile and link shader
* @param vss source of vertex shader
* @param fss source of fragment shader
* @return linked program handle
*/
public static int loadShader(final String vss, final String fss) {
if (DEBUG) Log.v(TAG, "loadShader:");
int vs = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glShaderSource(vs, vss);
GLES20.glCompileShader(vs);
final int[] compiled = new int[1];
GLES20.glGetShaderiv(vs, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
if (DEBUG) Log.e(TAG, "Failed to compile vertex shader:"
+ GLES20.glGetShaderInfoLog(vs));
GLES20.glDeleteShader(vs);
vs = 0;
}
int fs = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(fs, fss);
GLES20.glCompileShader(fs);
GLES20.glGetShaderiv(fs, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
if (DEBUG) Log.w(TAG, "Failed to compile fragment shader:"
+ GLES20.glGetShaderInfoLog(fs));
GLES20.glDeleteShader(fs);
fs = 0;
}
final int program = GLES20.glCreateProgram();
GLES20.glAttachShader(program, vs);
GLES20.glAttachShader(program, fs);
GLES20.glLinkProgram(program);
return program;
}
}
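/*
 * Usage sketch (added for illustration; not part of the original commit): a
 * minimal draw pass on a GL thread. Note initTex() creates an OES texture
 * while draw() binds GL_TEXTURE_2D with a sampler2D shader, so texId here is
 * assumed to be a plain 2D texture filled elsewhere.
 */
class GLDrawer2DUsageSketch {
static void drawOnce(int texId, float[] texMatrix) {
GLDrawer2D drawer = new GLDrawer2D(); // compiles and links the shaders
drawer.draw(texId, texMatrix); // fills the current surface with the texture
drawer.release();
}
}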


@ -0,0 +1,211 @@
package me.lake.librestreaming.encoder.utils;
import android.graphics.SurfaceTexture;
import android.opengl.EGLContext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.text.TextUtils;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
/**
* Helper class to draw a texture to the whole view on a private thread
*/
public final class RenderHandler implements Runnable {
private static final boolean DEBUG = false; // set to true for verbose logging during development
private static final String TAG = "RenderHandler";
private final Object mSync = new Object();
private EGLContext mShard_context;
private boolean mIsRecordable;
private Object mSurface;
private int mTexId = -1;
private float[] mMatrix = new float[32];
private boolean mRequestSetEglContext;
private boolean mRequestRelease;
private int mRequestDraw;
public static final RenderHandler createHandler(final String name) {
if (DEBUG) Log.v(TAG, "createHandler:");
final RenderHandler handler = new RenderHandler();
synchronized (handler.mSync) {
new Thread(handler, !TextUtils.isEmpty(name) ? name : TAG).start();
try {
handler.mSync.wait();
} catch (final InterruptedException e) {
}
}
return handler;
}
public final void setEglContext(final EGLContext shared_context, final int tex_id, final Object surface, final boolean isRecordable) {
if (DEBUG) Log.i(TAG, "setEglContext:");
if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture) && !(surface instanceof SurfaceHolder))
throw new RuntimeException("unsupported window type:" + surface);
synchronized (mSync) {
if (mRequestRelease) return;
mShard_context = shared_context;
mTexId = tex_id;
mSurface = surface;
mIsRecordable = isRecordable;
mRequestSetEglContext = true;
Matrix.setIdentityM(mMatrix, 0);
Matrix.setIdentityM(mMatrix, 16);
mSync.notifyAll();
try {
mSync.wait();
} catch (final InterruptedException e) {
}
}
}
public final void draw() {
draw(mTexId, mMatrix, null);
}
public final void draw(final int tex_id) {
draw(tex_id, mMatrix, null);
}
public final void draw(final float[] tex_matrix) {
draw(mTexId, tex_matrix, null);
}
public final void draw(final float[] tex_matrix, final float[] mvp_matrix) {
draw(mTexId, tex_matrix, mvp_matrix);
}
public final void draw(final int tex_id, final float[] tex_matrix) {
draw(tex_id, tex_matrix, null);
}
public final void draw(final int tex_id, final float[] tex_matrix, final float[] mvp_matrix) {
synchronized (mSync) {
if (mRequestRelease) return;
mTexId = tex_id;
if ((tex_matrix != null) && (tex_matrix.length >= 16)) {
System.arraycopy(tex_matrix, 0, mMatrix, 0, 16);
} else {
Matrix.setIdentityM(mMatrix, 0);
}
if ((mvp_matrix != null) && (mvp_matrix.length >= 16)) {
System.arraycopy(mvp_matrix, 0, mMatrix, 16, 16);
} else {
Matrix.setIdentityM(mMatrix, 16);
}
mRequestDraw++;
mSync.notifyAll();
/* try {
mSync.wait();
} catch (final InterruptedException e) {
} */
}
}
public boolean isValid() {
synchronized (mSync) {
return !(mSurface instanceof Surface) || ((Surface)mSurface).isValid();
}
}
public final void release() {
if (DEBUG) Log.i(TAG, "release:");
synchronized (mSync) {
if (mRequestRelease) return;
mRequestRelease = true;
mSync.notifyAll();
try {
mSync.wait();
} catch (final InterruptedException e) {
}
}
}
//********************************************************************************
//********************************************************************************
private EGLBase mEgl;
private EGLBase.EglSurface mInputSurface;
private GLDrawer2D mDrawer;
@Override
public final void run() {
if (DEBUG) Log.i(TAG, "RenderHandler thread started:");
synchronized (mSync) {
mRequestSetEglContext = mRequestRelease = false;
mRequestDraw = 0;
mSync.notifyAll();
}
boolean localRequestDraw;
for (;;) {
synchronized (mSync) {
if (mRequestRelease) break;
if (mRequestSetEglContext) {
mRequestSetEglContext = false;
internalPrepare();
}
localRequestDraw = mRequestDraw > 0;
if (localRequestDraw) {
mRequestDraw--;
// mSync.notifyAll();
}
}
if (localRequestDraw) {
if ((mEgl != null) && mTexId >= 0) {
mInputSurface.makeCurrent();
// clear screen with yellow so that the rendering rectangle is visible
GLES20.glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
mDrawer.setMatrix(mMatrix, 16);
mDrawer.draw(mTexId, mMatrix);
mInputSurface.swap();
}
} else {
synchronized(mSync) {
try {
mSync.wait();
} catch (final InterruptedException e) {
break;
}
}
}
}
synchronized (mSync) {
mRequestRelease = true;
internalRelease();
mSync.notifyAll();
}
if (DEBUG) Log.i(TAG, "RenderHandler thread finished:");
}
private final void internalPrepare() {
if (DEBUG) Log.i(TAG, "internalPrepare:");
internalRelease();
mEgl = new EGLBase(mShard_context, false, mIsRecordable);
mInputSurface = mEgl.createFromSurface(mSurface);
mInputSurface.makeCurrent();
mDrawer = new GLDrawer2D();
mSurface = null;
mSync.notifyAll();
}
private final void internalRelease() {
if (DEBUG) Log.i(TAG, "internalRelease:");
if (mInputSurface != null) {
mInputSurface.release();
mInputSurface = null;
}
if (mDrawer != null) {
mDrawer.release();
mDrawer = null;
}
if (mEgl != null) {
mEgl.release();
mEgl = null;
}
}
}
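/*
 * Usage sketch (added for illustration; not part of the original commit): the
 * handler renders on its own thread; the caller hands over an EGL context once
 * and then posts draw requests per frame.
 */
class RenderHandlerUsageSketch {
static RenderHandler attach(EGLContext sharedContext, int texId, Surface surface) {
RenderHandler handler = RenderHandler.createHandler("SketchRender");
handler.setEglContext(sharedContext, texId, surface, true); // blocks until applied
return handler; // call handler.draw(texMatrix) per frame and handler.release() when done
}
}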


@ -0,0 +1,31 @@
package me.lake.librestreaming.filter.hardvideofilter;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import me.lake.librestreaming.core.GLHelper;
public class BaseHardVideoFilter {
protected int SIZE_WIDTH;
protected int SIZE_HEIGHT;
protected int directionFlag=-1;
protected ShortBuffer drawIndecesBuffer;
public void onInit(int VWidth, int VHeight) {
SIZE_WIDTH = VWidth;
SIZE_HEIGHT = VHeight;
drawIndecesBuffer = GLHelper.getDrawIndecesBuffer();
}
public void onDraw(final int cameraTexture,final int targetFrameBuffer, final FloatBuffer shapeBuffer, final FloatBuffer textrueBuffer) {
}
public void onDestroy() {
}
public void onDirectionUpdate(int _directionFlag) {
this.directionFlag = _directionFlag;
}
}


@ -0,0 +1,92 @@
package me.lake.librestreaming.filter.hardvideofilter;
import android.opengl.GLES20;
import java.nio.FloatBuffer;
import java.util.LinkedList;
import java.util.List;
import me.lake.librestreaming.tools.GLESTools;
public class HardVideoGroupFilter extends BaseHardVideoFilter {
private LinkedList<FilterWrapper> filterWrappers;
public HardVideoGroupFilter(List<BaseHardVideoFilter> filters) {
if (filters == null || filters.isEmpty()) {
throw new IllegalArgumentException("can not create empty GroupFilter");
}
filterWrappers = new LinkedList<FilterWrapper>();
for (BaseHardVideoFilter filter : filters) {
filterWrappers.add(new FilterWrapper(filter));
}
}
@Override
public void onInit(int VWidth, int VHeight) {
super.onInit(VWidth, VHeight);
for (FilterWrapper wrapper : filterWrappers) {
wrapper.filter.onInit(VWidth, VHeight);
int[] frameBuffer = new int[1];
int[] frameBufferTexture = new int[1];
GLESTools.createFrameBuff(frameBuffer,
frameBufferTexture,
SIZE_WIDTH,
SIZE_HEIGHT);
wrapper.frameBuffer = frameBuffer[0];
wrapper.frameBufferTexture = frameBufferTexture[0];
i++;
}
}
@Override
public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textrueBuffer) {
FilterWrapper preFilterWrapper = null;
int i = 0;
int texture;
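// Ping-pong through the chain: each filter samples the previous filter's
// framebuffer texture (the camera texture for the first one), and only the
// last filter draws into the caller's targetFrameBuffer.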
for (FilterWrapper wrapper : filterWrappers) {
if (preFilterWrapper == null) {
texture = cameraTexture;
} else {
texture = preFilterWrapper.frameBufferTexture;
}
if (i == (filterWrappers.size() - 1)) {
wrapper.filter.onDraw(texture, targetFrameBuffer, shapeBuffer, textrueBuffer);
} else {
wrapper.filter.onDraw(texture, wrapper.frameBuffer, shapeBuffer, textrueBuffer);
}
preFilterWrapper = wrapper;
i++;
}
}
@Override
public void onDestroy() {
super.onDestroy();
for (FilterWrapper wrapper : filterWrappers) {
wrapper.filter.onDestroy();
GLES20.glDeleteFramebuffers(1, new int[]{wrapper.frameBuffer}, 0);
GLES20.glDeleteTextures(1, new int[]{wrapper.frameBufferTexture}, 0);
}
}
@Override
public void onDirectionUpdate(int _directionFlag) {
super.onDirectionUpdate(_directionFlag);
for (FilterWrapper wrapper : filterWrappers) {
wrapper.filter.onDirectionUpdate(_directionFlag);
}
}
private class FilterWrapper {
BaseHardVideoFilter filter;
int frameBuffer;
int frameBufferTexture;
FilterWrapper(BaseHardVideoFilter filter) {
this.filter = filter;
}
}
}

View File

@ -0,0 +1,97 @@
package me.lake.librestreaming.filter.hardvideofilter;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import java.nio.FloatBuffer;
import me.lake.librestreaming.tools.GLESTools;
public class OriginalHardVideoFilter extends BaseHardVideoFilter {
protected int glProgram;
protected int glTextureLoc;
protected int glCamPostionLoc;
protected int glCamTextureCoordLoc;
protected String vertexShader_filter = "" +
"attribute vec4 aCamPosition;\n" +
"attribute vec2 aCamTextureCoord;\n" +
"varying vec2 vCamTextureCoord;\n" +
"void main(){\n" +
" gl_Position= aCamPosition;\n" +
" vCamTextureCoord = aCamTextureCoord;\n" +
"}";
protected String fragmentshader_filter = "" +
"precision highp float;\n" +
"varying highp vec2 vCamTextureCoord;\n" +
"uniform sampler2D uCamTexture;\n" +
"void main(){\n" +
" vec4 color = texture2D(uCamTexture, vCamTextureCoord);\n" +
" gl_FragColor = color;\n" +
"}";
public OriginalHardVideoFilter(String vertexShaderCode, String fragmentShaderCode) {
if (vertexShaderCode != null) {
vertexShader_filter = vertexShaderCode;
}
if (fragmentShaderCode != null) {
fragmentshader_filter = fragmentShaderCode;
}
}
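// A subclass usually only swaps the fragment shader. A minimal grayscale
// sketch (hypothetical, reusing the same uniform/varying names as the default):
//   new OriginalHardVideoFilter(null,
//           "precision highp float;\n" +
//           "varying highp vec2 vCamTextureCoord;\n" +
//           "uniform sampler2D uCamTexture;\n" +
//           "void main(){\n" +
//           "    vec4 c = texture2D(uCamTexture, vCamTextureCoord);\n" +
//           "    float y = dot(c.rgb, vec3(0.299, 0.587, 0.114));\n" +
//           "    gl_FragColor = vec4(vec3(y), c.a);\n" +
//           "}");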
@Override
public void onInit(int VWidth, int VHeight) {
super.onInit(VWidth, VHeight);
glProgram = GLESTools.createProgram(vertexShader_filter, fragmentshader_filter);
GLES20.glUseProgram(glProgram);
glTextureLoc = GLES20.glGetUniformLocation(glProgram, "uCamTexture");
glCamPostionLoc = GLES20.glGetAttribLocation(glProgram, "aCamPosition");
glCamTextureCoordLoc = GLES20.glGetAttribLocation(glProgram, "aCamTextureCoord");
}
@Override
public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textrueBuffer) {
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer);
GLES20.glUseProgram(glProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, cameraTexture);
GLES20.glUniform1i(glTextureLoc, 0);
GLES20.glEnableVertexAttribArray(glCamPostionLoc);
GLES20.glEnableVertexAttribArray(glCamTextureCoordLoc);
shapeBuffer.position(0);
GLES20.glVertexAttribPointer(glCamPostionLoc, 2,
GLES20.GL_FLOAT, false,
2 * 4, shapeBuffer);
textrueBuffer.position(0);
GLES20.glVertexAttribPointer(glCamTextureCoordLoc, 2,
GLES20.GL_FLOAT, false,
2 * 4, textrueBuffer);
onPreDraw();
GLES20.glViewport(0, 0, SIZE_WIDTH, SIZE_HEIGHT);
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer);
GLES20.glFinish();
onAfterDraw();
GLES20.glDisableVertexAttribArray(glCamPostionLoc);
GLES20.glDisableVertexAttribArray(glCamTextureCoordLoc);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
GLES20.glUseProgram(0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
protected void onPreDraw() {
}
protected void onAfterDraw() {
}
@Override
public void onDestroy() {
super.onDestroy();
GLES20.glDeleteProgram(glProgram);
}
}

View File

@ -0,0 +1,28 @@
package me.lake.librestreaming.filter.softaudiofilter;
public class BaseSoftAudioFilter {
protected int SIZE;
protected int SIZE_HALF;
public void onInit(int size) {
SIZE = size;
SIZE_HALF = size/2;
}
/**
* Process one frame of PCM audio.
*
* @param orignBuff          input buffer
* @param targetBuff         output buffer the filter may write into
* @param presentationTimeMs presentation timestamp in milliseconds
* @param sequenceNum        frame sequence number
* @return false to use orignBuff, true to use targetBuff
*/
public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
return false;
}
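// Example override (a hedged half-volume filter, assuming 16-bit little-endian
// PCM as delivered by AudioRecord):
//   public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
//       for (int i = 0; i + 1 < SIZE; i += 2) {
//           short s = (short) ((orignBuff[i] & 0xFF) | (orignBuff[i + 1] << 8));
//           s = (short) (s >> 1);                  // halve the amplitude
//           targetBuff[i] = (byte) s;
//           targetBuff[i + 1] = (byte) (s >> 8);
//       }
//       return true; // use targetBuff
//   }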
public void onDestroy() {
}
}

View File

@ -0,0 +1,36 @@
package me.lake.librestreaming.filter.softvideofilter;
public class BaseSoftVideoFilter {
protected int SIZE_WIDTH;
protected int SIZE_HEIGHT;
protected int SIZE_Y;
protected int SIZE_TOTAL;
protected int SIZE_U;
protected int SIZE_UV;
public void onInit(int VWidth, int VHeight) {
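// YUV 4:2:0 layout: a full-resolution Y plane plus U and V at quarter
// resolution each, so the total buffer is width * height * 3 / 2 bytes.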
SIZE_WIDTH = VWidth;
SIZE_HEIGHT = VHeight;
SIZE_Y = SIZE_HEIGHT * SIZE_WIDTH;
SIZE_UV = SIZE_HEIGHT * SIZE_WIDTH / 2;
SIZE_U = SIZE_UV / 2;
SIZE_TOTAL = SIZE_Y * 3 / 2;
}
/**
* Process one frame of video pixel data.
*
* @param orignBuff          input buffer
* @param targetBuff         output buffer the filter may write into
* @param presentationTimeMs presentation timestamp in milliseconds
* @param sequenceNum        frame sequence number
* @return false to use orignBuff, true to use targetBuff
*/
public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
return false;
}
public void onDestroy() {
}
}

View File

@ -0,0 +1,19 @@
package me.lake.librestreaming.model;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
public class MediaCodecGLWapper {
public EGLDisplay eglDisplay;
public EGLConfig eglConfig;
public EGLSurface eglSurface;
public EGLContext eglContext;
public int drawProgram;
public int drawTextureLoc;
public int drawPostionLoc;
public int drawTextureCoordLoc;
}

View File

@ -0,0 +1,25 @@
package me.lake.librestreaming.model;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
public class OffScreenGLWapper {
public EGLDisplay eglDisplay;
public EGLConfig eglConfig;
public EGLSurface eglSurface;
public EGLContext eglContext;
public int cam2dProgram;
public int cam2dTextureMatrix;
public int cam2dTextureLoc;
public int cam2dPostionLoc;
public int cam2dTextureCoordLoc;
public int camProgram;
public int camTextureLoc;
public int camPostionLoc;
public int camTextureCoordLoc;
}

View File

@ -0,0 +1,14 @@
package me.lake.librestreaming.model;
public class RESAudioBuff {
public boolean isReadyToFill;
public int audioFormat = -1;
public byte[] buff;
public RESAudioBuff(int audioFormat, int size) {
isReadyToFill = true;
this.audioFormat = audioFormat;
buff = new byte[size];
}
}

View File

@ -0,0 +1,212 @@
package me.lake.librestreaming.model;
import android.hardware.Camera;
public class RESConfig {
public static class FilterMode {
public static final int HARD = RESCoreParameters.FILTER_MODE_HARD;
public static final int SOFT = RESCoreParameters.FILTER_MODE_SOFT;
}
public static class RenderingMode {
public static final int NativeWindow = RESCoreParameters.RENDERING_MODE_NATIVE_WINDOW;
public static final int OpenGLES = RESCoreParameters.RENDERING_MODE_OPENGLES;
}
public static class DirectionMode {
public static final int FLAG_DIRECTION_FLIP_HORIZONTAL = RESCoreParameters.FLAG_DIRECTION_FLIP_HORIZONTAL;
public static final int FLAG_DIRECTION_FLIP_VERTICAL = RESCoreParameters.FLAG_DIRECTION_FLIP_VERTICAL;
public static final int FLAG_DIRECTION_ROATATION_0 = RESCoreParameters.FLAG_DIRECTION_ROATATION_0;
public static final int FLAG_DIRECTION_ROATATION_90 = RESCoreParameters.FLAG_DIRECTION_ROATATION_90;
public static final int FLAG_DIRECTION_ROATATION_180 = RESCoreParameters.FLAG_DIRECTION_ROATATION_180;
public static final int FLAG_DIRECTION_ROATATION_270 = RESCoreParameters.FLAG_DIRECTION_ROATATION_270;
}
private int filterMode;
private Size targetVideoSize;
private int videoBufferQueueNum;
private int bitRate;
private String rtmpAddr;
private int renderingMode;
private int defaultCamera;
private int frontCameraDirectionMode;
private int backCameraDirectionMode;
private int videoFPS;
private int videoGOP;
private boolean printDetailMsg;
private Size targetPreviewSize;
private RESConfig() {
}
public static RESConfig obtain() {
RESConfig res = new RESConfig();
res.setFilterMode(FilterMode.SOFT);
res.setRenderingMode(RenderingMode.NativeWindow);
res.setTargetVideoSize(new Size(1920, 1080));
res.setVideoFPS(60);
res.setVideoGOP(1);
res.setVideoBufferQueueNum(1);
res.setBitRate(10*1024*1024);
res.setPrintDetailMsg(false);
res.setDefaultCamera(Camera.CameraInfo.CAMERA_FACING_BACK);
res.setBackCameraDirectionMode(DirectionMode.FLAG_DIRECTION_ROATATION_0);
res.setFrontCameraDirectionMode(DirectionMode.FLAG_DIRECTION_ROATATION_0);
return res;
}
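// Typical use: take the defaults above and tweak them before handing the
// config to the client (the rtmp url below is a placeholder):
//   RESConfig cfg = RESConfig.obtain();
//   cfg.setBitRate(4 * 1024 * 1024);
//   cfg.setRtmpAddr("rtmp://example.com/live/stream");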
/**
* set the filter mode.
*
* @param filterMode {@link FilterMode}
*/
public void setFilterMode(int filterMode) {
this.filterMode = filterMode;
}
/**
* set the default camera used to start the stream
*/
public void setDefaultCamera(int defaultCamera) {
this.defaultCamera = defaultCamera;
}
/**
* set front camera rotation & flip
*
* @param frontCameraDirectionMode {@link DirectionMode}
*/
public void setFrontCameraDirectionMode(int frontCameraDirectionMode) {
this.frontCameraDirectionMode = frontCameraDirectionMode;
}
/**
* set back camera rotation & flip
*
* @param backCameraDirectionMode {@link DirectionMode}
*/
public void setBackCameraDirectionMode(int backCameraDirectionMode) {
this.backCameraDirectionMode = backCameraDirectionMode;
}
/**
* set the renderingMode used in soft filter mode<br/>
* ignored in hard mode
*
* @param renderingMode {@link RenderingMode}
*/
public void setRenderingMode(int renderingMode) {
this.renderingMode = renderingMode;
}
/**
* currently unused
*
* @param printDetailMsg
*/
public void setPrintDetailMsg(boolean printDetailMsg) {
this.printDetailMsg = printDetailMsg;
}
/**
* set the target video size.<br/>
* the real video size may differ from it, depending on the device.
*
* @param videoSize
*/
public void setTargetVideoSize(Size videoSize) {
targetVideoSize = videoSize;
}
/**
* set the video buffer number for soft mode.<br/>
* a larger number gives smoother video but uses more memory.
*
* @param num
*/
public void setVideoBufferQueueNum(int num) {
videoBufferQueueNum = num;
}
/**
* set video bitrate
*
* @param bitRate
*/
public void setBitRate(int bitRate) {
this.bitRate = bitRate;
}
public int getVideoFPS() {
return videoFPS;
}
public void setVideoFPS(int videoFPS) {
this.videoFPS = videoFPS;
}
public int getVideoGOP(){
return videoGOP;
}
public void setVideoGOP(int videoGOP){
this.videoGOP = videoGOP;
}
public int getVideoBufferQueueNum() {
return videoBufferQueueNum;
}
public int getBitRate() {
return bitRate;
}
public Size getTargetVideoSize() {
return targetVideoSize;
}
public int getFilterMode() {
return filterMode;
}
public int getDefaultCamera() {
return defaultCamera;
}
public int getBackCameraDirectionMode() {
return backCameraDirectionMode;
}
public int getFrontCameraDirectionMode() {
return frontCameraDirectionMode;
}
public int getRenderingMode() {
return renderingMode;
}
public String getRtmpAddr() {
return rtmpAddr;
}
public void setRtmpAddr(String rtmpAddr) {
this.rtmpAddr = rtmpAddr;
}
public boolean isPrintDetailMsg() {
return printDetailMsg;
}
public void setTargetPreviewSize(Size previewSize) {
targetPreviewSize = previewSize;
}
public Size getTargetPreviewSize() {
return targetPreviewSize;
}
}

View File

@ -0,0 +1,119 @@
package me.lake.librestreaming.model;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import me.lake.librestreaming.tools.LogTools;
import me.lake.librestreaming.ws.StreamAVOption;
public class RESCoreParameters {
public static final int FILTER_MODE_HARD = 1;
public static final int FILTER_MODE_SOFT = 2;
public static final int RENDERING_MODE_NATIVE_WINDOW = 1;
public static final int RENDERING_MODE_OPENGLES = 2;
/**
* must stay in sync with the values used on the JNI side
*/
public static final int FLAG_DIRECTION_FLIP_HORIZONTAL = 0x01;
public static final int FLAG_DIRECTION_FLIP_VERTICAL = 0x02;
public static final int FLAG_DIRECTION_ROATATION_0 = 0x10;
public static final int FLAG_DIRECTION_ROATATION_90 = 0x20;
public static final int FLAG_DIRECTION_ROATATION_180 = 0x40;
public static final int FLAG_DIRECTION_ROATATION_270 = 0x80;
public boolean done;
public boolean printDetailMsg;
public int filterMode;
public int renderingMode;
public String rtmpAddr;
public int frontCameraDirectionMode;
public int backCameraDirectionMode;
public boolean isPortrait;
public int previewVideoWidth;
public int previewVideoHeight;
public int videoWidth;
public int videoHeight;
public int videoFPS;
public int videoGOP;
public float cropRatio;
public int previewColorFormat;
public int previewBufferSize;
public int mediacodecAVCColorFormat;
public int mediacdoecAVCBitRate;
public int videoBufferQueueNum;
public int audioBufferQueueNum;
public int audioRecoderFormat;
public int audioRecoderSampleRate;
public int audioRecoderChannelConfig;
public int audioRecoderSliceSize;
public int audioRecoderSource;
public int audioRecoderBufferSize;
public int previewMaxFps;
public int previewMinFps;
public int mediacodecAVCFrameRate;
public int mediacodecAVCIFrameInterval;
public int mediacodecAVCProfile;
public int mediacodecAVClevel;
public int mediacodecAACProfile;
public int mediacodecAACSampleRate;
public int mediacodecAACChannelCount;
public int mediacodecAACBitRate;
public int mediacodecAACMaxInputSize;
//sender
public int senderQueueLength;
public RESCoreParameters() {
done = false;
printDetailMsg = false;
filterMode=-1;
videoWidth = StreamAVOption.videoWidth;
videoHeight = StreamAVOption.videoHeight;
previewVideoWidth = StreamAVOption.previewWidth;
previewVideoHeight = StreamAVOption.previewHeight;
videoFPS=StreamAVOption.videoFramerate;
videoGOP=StreamAVOption.videoGOP;
previewColorFormat = -1;
mediacodecAVCColorFormat = -1;
mediacdoecAVCBitRate = StreamAVOption.videoBitrate;
videoBufferQueueNum = -1;
audioBufferQueueNum = -1;
mediacodecAVCFrameRate = -1;
mediacodecAVCIFrameInterval = -1;
mediacodecAVCProfile = -1;
mediacodecAVClevel = -1;
mediacodecAACProfile = -1;
mediacodecAACSampleRate = -1;
mediacodecAACChannelCount = -1;
mediacodecAACBitRate = -1;
mediacodecAACMaxInputSize = -1;
}
public void dump() {
LogTools.e(this.toString());
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("ResParameter:");
Field[] fields = this.getClass().getDeclaredFields();
for (Field field : fields) {
if (Modifier.isStatic(field.getModifiers())) {
continue;
}
field.setAccessible(true);
try {
sb.append(field.getName());
sb.append('=');
sb.append(field.get(this));
sb.append(';');
} catch (IllegalAccessException e) {
}
}
return sb.toString();
}
}

View File

@ -0,0 +1,18 @@
package me.lake.librestreaming.model;
import java.util.Arrays;
public class RESVideoBuff {
public boolean isReadyToFill;
public int colorFormat = -1;
public byte[] buff;
public RESVideoBuff(int colorFormat, int size) {
isReadyToFill = true;
this.colorFormat = colorFormat;
buff = new byte[size];
Arrays.fill(buff, size/2, size, (byte) 127);
}
}

View File

@ -0,0 +1,19 @@
package me.lake.librestreaming.model;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
public class ScreenGLWapper {
public EGLDisplay eglDisplay;
public EGLConfig eglConfig;
public EGLSurface eglSurface;
public EGLContext eglContext;
public int drawProgram;
public int drawTextureLoc;
public int drawPostionLoc;
public int drawTextureCoordLoc;
}

View File

@ -0,0 +1,86 @@
package me.lake.librestreaming.model;
public final class Size {
/**
* Create a new immutable Size instance.
*
* @param width The width of the size, in pixels
* @param height The height of the size, in pixels
*/
public Size(int width, int height) {
mWidth = width;
mHeight = height;
}
/**
* Get the width of the size (in pixels).
*
* @return width
*/
public int getWidth() {
return mWidth;
}
/**
* Get the height of the size (in pixels).
*
* @return height
*/
public int getHeight() {
return mHeight;
}
/**
* Check if this size is equal to another size.
* <p>
* Two sizes are equal if and only if both their widths and heights are
* equal.
* </p>
* <p>
* A size object is never equal to any other type of object.
* </p>
*
* @return {@code true} if the objects were equal, {@code false} otherwise
*/
@Override
public boolean equals(final Object obj) {
if (obj == null) {
return false;
}
if (this == obj) {
return true;
}
if (obj instanceof Size) {
Size other = (Size) obj;
return mWidth == other.mWidth && mHeight == other.mHeight;
}
return false;
}
/**
* Return the size represented as a string with the format {@code "WxH"}
*
* @return string representation of the size
*/
@Override
public String toString() {
return mWidth + "x" + mHeight;
}
private static NumberFormatException invalidSize(String s) {
throw new NumberFormatException("Invalid Size: \"" + s + "\"");
}
/**
* {@inheritDoc}
*/
@Override
public int hashCode() {
// assuming most sizes are <2^16, doing a rotate will give us perfect hashing
return mHeight ^ ((mWidth << (Integer.SIZE / 2)) | (mWidth >>> (Integer.SIZE / 2)));
}
private final int mWidth;
private final int mHeight;
}

View File

@ -0,0 +1,378 @@
package me.lake.librestreaming.render;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import java.util.Arrays;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
import me.lake.librestreaming.tools.GLESTools;
public class GLESRender implements IRender {
private final Object syncRenderThread = new Object();
GLESRenderThread glesRenderThread;
@Override
public void create(SurfaceTexture visualSurfaceTexture, int pixelFormat, int pixelWidth, int pixelHeight, int visualWidth, int visualHeight) {
if (pixelFormat != ImageFormat.NV21) {
throw new IllegalArgumentException("GLESRender,pixelFormat only support NV21");
}
synchronized (syncRenderThread) {
glesRenderThread = new GLESRenderThread(visualSurfaceTexture,
pixelFormat,
pixelWidth,
pixelHeight,
visualWidth,
visualHeight);
glesRenderThread.start();
}
}
@Override
public void update(int visualWidth, int visualHeight) {
synchronized (syncRenderThread) {
glesRenderThread.updateVisualWH(visualWidth, visualHeight);
}
}
@Override
public void rendering(byte[] pixel) {
synchronized (syncRenderThread) {
glesRenderThread.updatePixel(pixel);
}
}
@Override
public void destroy(boolean releaseTexture) {
synchronized (syncRenderThread) {
glesRenderThread.quit(releaseTexture);
try {
glesRenderThread.join();
} catch (InterruptedException ignored) {
}
}
}
private static class GLESRenderThread extends Thread {
int mPixelWidth;
int mPixelHeight;
int mySize;
int mVisualWidth;
int mVisualHeight;
byte[] yTemp, uTemp, vTemp;
SurfaceTexture mVisualSurfaceTexture;
private final Object syncThread = new Object();
boolean quit = false;
boolean releaseTexture=true;
EGL10 mEgl;
EGLDisplay mEglDisplay;
EGLConfig mEglConfig;
EGLSurface mEglSurface;
EGLContext mEglContext;
int mProgram;
public GLESRenderThread(SurfaceTexture visualSurfaceTexture, int pixelFormat, int pixelWidth, int pixelHeight, int visualWidth, int visualHeight) {
quit = false;
mVisualSurfaceTexture = visualSurfaceTexture;
mPixelWidth = pixelWidth;
mPixelHeight = pixelHeight;
mySize = mPixelWidth * mPixelHeight;
mVisualWidth = visualWidth;
mVisualHeight = visualHeight;
yBuf = ByteBuffer.allocateDirect(mySize);
uBuf = ByteBuffer.allocateDirect(mySize >> 2);
vBuf = ByteBuffer.allocateDirect(mySize >> 2);
yTemp = new byte[mySize];
uTemp = new byte[mySize >> 2];
vTemp = new byte[mySize >> 2];
Arrays.fill(uTemp, (byte) 0x7F);
Arrays.fill(vTemp, (byte) 0x7F);
uBuf.position(0);
uBuf.put(uTemp).position(0);
vBuf.position(0);
vBuf.put(vTemp).position(0);
}
public void quit(boolean releaseTexture) {
synchronized (syncThread) {
this.releaseTexture = releaseTexture;
quit = true;
syncThread.notify();
}
}
public void updatePixel(byte[] pixel) {
synchronized (syncBuff) {
NV21TOYUV(pixel, yTemp, uTemp, vTemp, mPixelWidth, mPixelHeight);
yBuf.position(0);
yBuf.put(yTemp).position(0);
uBuf.position(0);
uBuf.put(uTemp).position(0);
vBuf.position(0);
vBuf.put(vTemp).position(0);
}
synchronized (syncThread) {
syncThread.notify();
}
}
public void updateVisualWH(int visualWidth, int visualHeight) {
mVisualWidth = visualWidth;
mVisualHeight = visualHeight;
}
@Override
public void run() {
initGLES();
mProgram = GLESTools.createProgram(vertexShaderCode, fragmentshaderCode);
initVertex();
initTexture();
while (!quit) {
drawFrame();
if (!mEgl.eglSwapBuffers(mEglDisplay, mEglSurface)) {
throw new RuntimeException("eglSwapBuffers,failed!");
}
synchronized (syncThread) {
try {
if(!quit) {
syncThread.wait();
}
} catch (InterruptedException ignored) {
}
}
}
releaseGLES();
if (releaseTexture) {
mVisualSurfaceTexture.release();
}
}
private void drawFrame() {
GLES20.glViewport(0, 0, mVisualWidth, mVisualHeight);
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
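// Re-upload the Y/U/V planes with glTexSubImage2D (texture storage was
// allocated once in initTexture), then draw the full-screen quad below.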
synchronized (syncBuff) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yTexture[0]);
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0,
mPixelWidth,
mPixelHeight,
GLES20.GL_LUMINANCE,
GLES20.GL_UNSIGNED_BYTE,
yBuf);
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, uTexture[0]);
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0,
mPixelWidth >> 1,
mPixelHeight >> 1,
GLES20.GL_LUMINANCE,
GLES20.GL_UNSIGNED_BYTE,
uBuf);
GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, vTexture[0]);
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0,
mPixelWidth >> 1,
mPixelHeight >> 1,
GLES20.GL_LUMINANCE,
GLES20.GL_UNSIGNED_BYTE,
vBuf);
}
//=================================
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndices.length, GLES20.GL_UNSIGNED_SHORT, mDrawIndicesBuffer);
GLES20.glFinish();
}
private void initGLES() {
mEgl = (EGL10) EGLContext.getEGL();
mEglDisplay = mEgl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (EGL10.EGL_NO_DISPLAY == mEglDisplay) {
throw new RuntimeException("GLESRender,eglGetDisplay,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
int versions[] = new int[2];
if (!mEgl.eglInitialize(mEglDisplay, versions)) {
throw new RuntimeException("GLESRender,eglInitialize,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
int configsCount[] = new int[1];
EGLConfig configs[] = new EGLConfig[1];
int configSpec[] = new int[]{
EGL10.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_DEPTH_SIZE, 0,
EGL10.EGL_STENCIL_SIZE, 0,
EGL10.EGL_NONE
};
mEgl.eglChooseConfig(mEglDisplay, configSpec, configs, 1, configsCount);
if (configsCount[0] <= 0) {
throw new RuntimeException("GLESRender,eglChooseConfig,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
mEglConfig = configs[0];
mEglSurface = mEgl.eglCreateWindowSurface(mEglDisplay, mEglConfig, mVisualSurfaceTexture, null);
if (null == mEglSurface || EGL10.EGL_NO_SURFACE == mEglSurface) {
throw new RuntimeException("GLESRender,eglCreateWindowSurface,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
int contextSpec[] = new int[]{
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
mEglContext = mEgl.eglCreateContext(mEglDisplay, mEglConfig, EGL10.EGL_NO_CONTEXT, contextSpec);
if (EGL10.EGL_NO_CONTEXT == mEglContext) {
throw new RuntimeException("GLESRender,eglCreateContext,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
if (!mEgl.eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext)) {
throw new RuntimeException("GLESRender,eglMakeCurrent,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
}
private void initVertex() {
mSquareVerticesBuffer = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * squareVertices.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
mSquareVerticesBuffer.put(squareVertices);
mSquareVerticesBuffer.position(0);
mTextureCoordsBuffer = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * textureVertices.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
mTextureCoordsBuffer.put(textureVertices);
mTextureCoordsBuffer.position(0);
mDrawIndicesBuffer = ByteBuffer.allocateDirect(SHORT_SIZE_BYTES * drawIndices.length).
order(ByteOrder.nativeOrder()).
asShortBuffer();
mDrawIndicesBuffer.put(drawIndices);
mDrawIndicesBuffer.position(0);
}
private void initTexture() {
GLES20.glEnable(GLES20.GL_TEXTURE_2D);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
createTexture(mPixelWidth, mPixelHeight, GLES20.GL_LUMINANCE, yTexture);
createTexture(mPixelWidth >> 1, mPixelHeight >> 1, GLES20.GL_LUMINANCE, uTexture);
createTexture(mPixelWidth >> 1, mPixelHeight >> 1, GLES20.GL_LUMINANCE, vTexture);
GLES20.glUseProgram(mProgram);
sampleYLoaction = GLES20.glGetUniformLocation(mProgram, "samplerY");
sampleULoaction = GLES20.glGetUniformLocation(mProgram, "samplerU");
sampleVLoaction = GLES20.glGetUniformLocation(mProgram, "samplerV");
GLES20.glUniform1i(sampleYLoaction, 0);
GLES20.glUniform1i(sampleULoaction, 1);
GLES20.glUniform1i(sampleVLoaction, 2);
int aPostionLocation = GLES20.glGetAttribLocation(mProgram, "aPosition");
int aTextureCoordLocation = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
GLES20.glEnableVertexAttribArray(aPostionLocation);
GLES20.glVertexAttribPointer(aPostionLocation, SHAPE_COORD_PER_VERTEX,
GLES20.GL_FLOAT, false,
SHAPE_COORD_PER_VERTEX * 4, mSquareVerticesBuffer);
GLES20.glEnableVertexAttribArray(aTextureCoordLocation);
GLES20.glVertexAttribPointer(aTextureCoordLocation, TEXTURE_COORD_PER_VERTEX,
GLES20.GL_FLOAT, false,
TEXTURE_COORD_PER_VERTEX * 4, mTextureCoordsBuffer);
}
private void createTexture(int width, int height, int format, int[] texture) {
GLES20.glGenTextures(1, texture, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture[0]);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, format, width, height, 0, format, GLES20.GL_UNSIGNED_BYTE, null);
}
private void releaseGLES() {
GLES20.glDeleteProgram(mProgram);
GLES20.glDeleteTextures(1, yTexture, 0);
GLES20.glDeleteTextures(1, uTexture, 0);
GLES20.glDeleteTextures(1, vTexture, 0);
mEgl.eglMakeCurrent(mEglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
mEgl.eglDestroySurface(mEglDisplay, mEglSurface);
mEgl.eglDestroyContext(mEglDisplay, mEglContext);
mEgl.eglTerminate(mEglDisplay);
}
//Pixel Buff
private final Object syncBuff = new Object();
private ByteBuffer yBuf;
private ByteBuffer uBuf;
private ByteBuffer vBuf;
//texture
private int[] yTexture = new int[1];
private int[] uTexture = new int[1];
private int[] vTexture = new int[1];
private int sampleYLoaction;
private int sampleULoaction;
private int sampleVLoaction;
//shape vertices
private FloatBuffer mSquareVerticesBuffer;
private static float squareVertices[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
1.0f, 1.0f, 0.0f,
-1.0f, 1.0f, 0.0f
};
//texture coordinate vertices
private FloatBuffer mTextureCoordsBuffer;
private static float textureVertices[] = {
0.0f, 1.0f,
1.0f, 1.0f,
1.0f, 0.0f,
0.0f, 0.0f
};
//gl draw order
private ShortBuffer mDrawIndicesBuffer;
private static short drawIndices[] = {0, 1, 2, 0, 2, 3};
private static int FLOAT_SIZE_BYTES = 4;
private static int SHORT_SIZE_BYTES = 2;
private static final int SHAPE_COORD_PER_VERTEX = 3;
private static final int TEXTURE_COORD_PER_VERTEX = 2;
private static String vertexShaderCode =
"attribute vec4 aPosition;\n" +
"attribute vec2 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main(){\n" +
" gl_Position= aPosition;\n" +
" vTextureCoord = aTextureCoord;\n" +
"}";
private static String fragmentshaderCode =
"varying lowp vec2 vTextureCoord;\n" +
"uniform sampler2D samplerY;\n" +
"uniform sampler2D samplerU;\n" +
"uniform sampler2D samplerV;\n" +
"const mediump mat3 yuv2rgb = mat3(1,1,1,0,-0.39465,2.03211,1.13983,-0.5806,0);\n" +
"void main(){\n" +
" mediump vec3 yuv;\n" +
" yuv.x = texture2D(samplerY,vTextureCoord).r;\n" +
" yuv.y = texture2D(samplerU,vTextureCoord).r - 0.5;\n" +
" yuv.z = texture2D(samplerV,vTextureCoord).r - 0.5;\n" +
" gl_FragColor = vec4(yuv2rgb*yuv,1);\n" +
"}";
}
@SuppressWarnings("all")
private static native void NV21TOYUV(byte[] src, byte[] dstY, byte[] dstU, byte[] dstV, int width, int height);
}

View File

@ -0,0 +1,14 @@
package me.lake.librestreaming.render;
import android.graphics.SurfaceTexture;
public interface IRender {
void create(SurfaceTexture visualSurfaceTexture, int pixelFormat, int pixelWidth, int pixelHeight, int visualWidth, int visualHeight);
void update(int visualWidth, int visualHeight);
void rendering(byte[] pixel);
void destroy(boolean releaseTexture);
}

View File

@ -0,0 +1,50 @@
package me.lake.librestreaming.render;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.view.Surface;
import me.lake.librestreaming.tools.LogTools;
public class NativeRender implements IRender {
Surface mVisualSurface;
int mPixelWidth;
int mPixelHeight;
int mPixelSize;
@Override
public void create(SurfaceTexture visualSurfaceTexture, int pixelFormat, int pixelWidth, int pixelHeight, int visualWidth, int visualHeight) {
if (pixelFormat != ImageFormat.NV21) {
throw new IllegalArgumentException("NativeRender,pixelFormat only support NV21");
}
mVisualSurface = new Surface(visualSurfaceTexture);
mPixelWidth = pixelWidth;
mPixelHeight = pixelHeight;
mPixelSize = (3 * pixelWidth * pixelHeight) / 2;
}
@Override
public void update(int visualWidth, int visualHeight) {
}
@Override
public void rendering(byte[] pixel) {
if (mVisualSurface != null && mVisualSurface.isValid()) {
renderingSurface(mVisualSurface, pixel, mPixelWidth, mPixelHeight, mPixelSize);
} else {
LogTools.d("NativeRender,rendering()invalid Surface");
}
}
@Override
public void destroy(boolean releaseTexture) {
if(releaseTexture) {
mVisualSurface.release();
}
}
@SuppressWarnings("all")
private native void renderingSurface(Surface surface, byte[] pixels, int w, int h, int s);
}

View File

@ -0,0 +1,124 @@
package me.lake.librestreaming.rtmp;
import java.util.ArrayList;
import me.lake.librestreaming.model.RESCoreParameters;
/**
* Generates an FLV ScriptData tag ("onMetaData") in accordance with the Adobe Flash
* Video File Format Specification v10.1, Annex E.5, with a limited set of AMF types.
*/
public class FLvMetaData {
private static final String Name = "onMetaData";
private static final int ScriptData = 18;
private static final byte[] TS_SID = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};
private static final byte[] ObjEndMarker = {0x00, 0x00, 0x09};
private static final int EmptySize = 21;
private ArrayList<byte[]> MetaData;
private int DataSize;
private int pointer;
private byte[] MetaDataFrame;
public FLvMetaData() {
MetaData = new ArrayList<>();
DataSize = 0;
}
public FLvMetaData(RESCoreParameters coreParameters) {
this();
//Audio
//AAC
setProperty("audiocodecid", 10);
switch (coreParameters.mediacodecAACBitRate) {
case 32 * 1024:
setProperty("audiodatarate", 32);
break;
case 48 * 1024:
setProperty("audiodatarate", 48);
break;
case 64 * 1024:
setProperty("audiodatarate", 64);
break;
}
switch (coreParameters.mediacodecAACSampleRate) {
case 44100:
setProperty("audiosamplerate", 44100);
break;
default:
break;
}
//Video
//h264
setProperty("videocodecid", 7);
setProperty("framerate", coreParameters.mediacodecAVCFrameRate);
setProperty("width", coreParameters.videoWidth);
setProperty("height", coreParameters.videoHeight);
}
public void setProperty(String Key, int value) {
addProperty(toFlvString(Key), (byte) 0, toFlvNum(value));
}
public void setProperty(String Key, String value) {
addProperty(toFlvString(Key), (byte) 2, toFlvString(value));
}
private void addProperty(byte[] Key, byte datatype, byte[] data) {
int Propertysize = Key.length + 1 + data.length;
byte[] Property = new byte[Propertysize];
System.arraycopy(Key, 0, Property, 0, Key.length);
Property[Key.length] = datatype;
System.arraycopy(data, 0, Property, Key.length + 1, data.length);
MetaData.add(Property);
DataSize += Propertysize;
}
public byte[] getMetaData() {
MetaDataFrame = new byte[DataSize + EmptySize];
pointer = 0;
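// AMF0 layout: type marker 2 = string ("onMetaData"), marker 8 = ECMA array
// (a 4-byte property count followed by the key/value properties), terminated
// by the object-end marker 0x00 0x00 0x09.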
//SCRIPTDATA.name
Addbyte(2);
AddbyteArray(toFlvString(Name));
//SCRIPTDATA.value ECMA array
Addbyte(8);
AddbyteArray(toUI(MetaData.size(), 4));
for (byte[] Property : MetaData) {
AddbyteArray(Property);
}
AddbyteArray(ObjEndMarker);
return MetaDataFrame;
}
private void Addbyte(int value) {
MetaDataFrame[pointer] = (byte) value;
pointer++;
}
private void AddbyteArray(byte[] value) {
System.arraycopy(value, 0, MetaDataFrame, pointer, value.length);
pointer += value.length;
}
private byte[] toFlvString(String text) {
byte[] FlvString = new byte[text.length() + 2];
System.arraycopy(toUI(text.length(), 2), 0, FlvString, 0, 2);
System.arraycopy(text.getBytes(), 0, FlvString, 2, text.length());
return FlvString;
}
private byte[] toUI(long value, int bytes) {
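// Encodes the value big-endian (most significant byte first), matching the
// UI<n> field types of the FLV spec.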
byte[] UI = new byte[bytes];
for (int i = 0; i < bytes; i++) {
UI[bytes - 1 - i] = (byte) (value >> (8 * i) & 0xff);
}
return UI;
}
private byte[] toFlvNum(double value) {
long tmp = Double.doubleToLongBits(value);
return toUI(tmp, 8);
}
}

View File

@ -0,0 +1,27 @@
package me.lake.librestreaming.rtmp;
public class RESFlvData {
public final static int FLV_RTMP_PACKET_TYPE_VIDEO = 9;
public final static int FLV_RTMP_PACKET_TYPE_AUDIO = 8;
public final static int FLV_RTMP_PACKET_TYPE_INFO = 18;
public final static int NALU_TYPE_IDR = 5;
public boolean droppable;
public int dts; //decoding timestamp
public byte[] byteBuffer; //tag payload data
public int size; //payload length in bytes
public int flvTagType; //tag type: video or audio
public int videoFrameType;
public boolean isKeyframe() {
return videoFrameType == NALU_TYPE_IDR;
}
}

View File

@ -0,0 +1,6 @@
package me.lake.librestreaming.rtmp;
public interface RESFlvDataCollecter {
void collect(RESFlvData flvData, int type);
}

View File

@ -0,0 +1,308 @@
package me.lake.librestreaming.rtmp;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import me.lake.librestreaming.client.CallbackDelivery;
import me.lake.librestreaming.core.RESByteSpeedometer;
import me.lake.librestreaming.core.RESFrameRateMeter;
import me.lake.librestreaming.core.listener.RESConnectionListener;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.tools.LogTools;
public class RESRtmpSender {
private static final int TIMEGRANULARITY = 3000;
public static final int FROM_AUDIO = 8;
public static final int FROM_VIDEO = 6;
private WorkHandler workHandler;
private HandlerThread workHandlerThread;
private final Object syncOp = new Object();
public void prepare(RESCoreParameters coreParameters) {
synchronized (syncOp) {
workHandlerThread = new HandlerThread("RESRtmpSender,workHandlerThread");
workHandlerThread.start();
workHandler = new WorkHandler(coreParameters.senderQueueLength,
new FLvMetaData(coreParameters),
workHandlerThread.getLooper());
}
}
public void setConnectionListener(RESConnectionListener connectionListener) {
synchronized (syncOp) {
workHandler.setConnectionListener(connectionListener);
}
}
public String getServerIpAddr() {
synchronized (syncOp) {
return workHandler == null ? null : workHandler.getServerIpAddr();
}
}
public float getSendFrameRate() {
synchronized (syncOp) {
return workHandler == null ? 0 : workHandler.getSendFrameRate();
}
}
public float getSendBufferFreePercent() {
synchronized (syncOp) {
return workHandler == null ? 0 : workHandler.getSendBufferFreePercent();
}
}
public void start(String rtmpAddr) {
synchronized (syncOp) {
workHandler.sendStart(rtmpAddr);
}
}
public void feed(RESFlvData flvData, int type) {
synchronized (syncOp) {
workHandler.sendFood(flvData, type);
}
}
public void stop() {
synchronized (syncOp) {
workHandler.sendStop();
}
}
public void destroy() {
synchronized (syncOp) {
workHandler.removeCallbacksAndMessages(null);
//workHandlerThread.quit();
workHandler.sendStop();
workHandlerThread.quitSafely();
/**
* do not wait for librtmp to quit
*/
// try {
// workHandlerThread.join();
// } catch (InterruptedException ignored) {
// }
}
}
public int getTotalSpeed() {
synchronized (syncOp) {
if (workHandler != null) {
return workHandler.getTotalSpeed();
} else {
return 0;
}
}
}
public WorkHandler getWorkHandler(){
return workHandler;
}
public static class WorkHandler extends Handler {
private final static int MSG_START = 1;
private final static int MSG_WRITE = 2;
private final static int MSG_STOP = 3;
private long jniRtmpPointer = 0;
private String serverIpAddr = null;
private int maxQueueLength;
private int writeMsgNum = 0;
private final Object syncWriteMsgNum = new Object();
private RESByteSpeedometer videoByteSpeedometer = new RESByteSpeedometer(TIMEGRANULARITY);
private RESByteSpeedometer audioByteSpeedometer = new RESByteSpeedometer(TIMEGRANULARITY);
private RESFrameRateMeter sendFrameRateMeter = new RESFrameRateMeter();
private FLvMetaData fLvMetaData;
private RESConnectionListener connectionListener;
private final Object syncConnectionListener = new Object();
private int errorTime = 0;
private enum STATE {
IDLE,
RUNNING,
STOPPED
}
private STATE state;
WorkHandler(int maxQueueLength, FLvMetaData fLvMetaData, Looper looper) {
super(looper);
this.maxQueueLength = maxQueueLength;
this.fLvMetaData = fLvMetaData;
state = STATE.IDLE;
}
public String getServerIpAddr() {
return serverIpAddr;
}
public float getSendFrameRate() {
return sendFrameRateMeter.getFps();
}
public float getSendBufferFreePercent() {
synchronized (syncWriteMsgNum) {
float res = (float) (maxQueueLength - writeMsgNum) / (float) maxQueueLength;
return res <= 0 ? 0f : res;
}
}
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case MSG_START:
if (state == STATE.RUNNING) {
break;
}
sendFrameRateMeter.reSet();
LogTools.d("RESRtmpSender,WorkHandler,tid=" + Thread.currentThread().getId());
jniRtmpPointer = RtmpClient.open((String) msg.obj, true);
final int openR = jniRtmpPointer == 0 ? 1 : 0;
if (openR == 0) {
serverIpAddr = RtmpClient.getIpAddr(jniRtmpPointer);
}
synchronized (syncConnectionListener) {
if (connectionListener != null) {
CallbackDelivery.i().post(new Runnable() {
@Override
public void run() {
connectionListener.onOpenConnectionResult(openR);
}
});
}
}
if (jniRtmpPointer == 0) {
break;
} else {
byte[] MetaData = fLvMetaData.getMetaData();
RtmpClient.write(jniRtmpPointer,
MetaData,
MetaData.length,
RESFlvData.FLV_RTMP_PACKET_TYPE_INFO, 0);
state = STATE.RUNNING;
}
break;
case MSG_STOP:
if (state == STATE.STOPPED || jniRtmpPointer == 0) {
break;
}
errorTime = 0;
final int closeR = RtmpClient.close(jniRtmpPointer);
serverIpAddr = null;
synchronized (syncConnectionListener) {
if (connectionListener != null) {
CallbackDelivery.i().post(new Runnable() {
@Override
public void run() {
connectionListener.onCloseConnectionResult(closeR);
}
});
}
}
state = STATE.STOPPED;
break;
case MSG_WRITE:
synchronized (syncWriteMsgNum) {
--writeMsgNum;
}
if (state != STATE.RUNNING) {
break;
}
if(mListener!=null){
mListener.getBufferFree(getSendBufferFreePercent());
}
RESFlvData flvData = (RESFlvData) msg.obj;
if (writeMsgNum >= (maxQueueLength * 3 / 4) && flvData.flvTagType == RESFlvData.FLV_RTMP_PACKET_TYPE_VIDEO && flvData.droppable) {
LogTools.d("senderQueue is crowded,abandon video");
break;
}
final int res = RtmpClient.write(jniRtmpPointer, flvData.byteBuffer, flvData.byteBuffer.length, flvData.flvTagType, flvData.dts);
if (res == 0) {
errorTime = 0;
if (flvData.flvTagType == RESFlvData.FLV_RTMP_PACKET_TYPE_VIDEO) {
videoByteSpeedometer.gain(flvData.size);
sendFrameRateMeter.count();
} else {
audioByteSpeedometer.gain(flvData.size);
}
} else {
++errorTime;
synchronized (syncConnectionListener) {
if (connectionListener != null) {
CallbackDelivery.i().post(new RESConnectionListener.RESWriteErrorRunable(connectionListener, res));
}
}
}
break;
default:
break;
}
}
public void sendStart(String rtmpAddr) {
this.removeMessages(MSG_START);
synchronized (syncWriteMsgNum) {
this.removeMessages(MSG_WRITE);
writeMsgNum = 0;
}
this.sendMessage(this.obtainMessage(MSG_START, rtmpAddr));
}
public void sendStop() {
this.removeMessages(MSG_STOP);
synchronized (syncWriteMsgNum) {
this.removeMessages(MSG_WRITE);
writeMsgNum = 0;
}
this.sendEmptyMessage(MSG_STOP);
}
public void sendFood(RESFlvData flvData, int type) {
synchronized (syncWriteMsgNum) {
//LAKETODO optimize
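// Backpressure: at most maxQueueLength writes may be queued here; on top of
// that, handleMessage drops droppable video tags once the queue passes 3/4 full.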
if (writeMsgNum <= maxQueueLength) {
this.sendMessage(this.obtainMessage(MSG_WRITE, type, 0, flvData));
++writeMsgNum;
} else {
LogTools.d("senderQueue is full,abandon");
}
}
}
public void setConnectionListener(RESConnectionListener connectionListener) {
synchronized (syncConnectionListener) {
this.connectionListener = connectionListener;
}
}
public int getTotalSpeed() {
return getVideoSpeed() + getAudioSpeed();
}
public int getVideoSpeed() {
return videoByteSpeedometer.getSpeed();
}
public int getAudioSpeed() {
return audioByteSpeedometer.getSpeed();
}
private BufferFreeListener mListener=null;
public interface BufferFreeListener{
void getBufferFree(float free);
}
public void setBufferFreeListener(BufferFreeListener listener){
mListener=listener;
}
}
}

View File

@ -0,0 +1,25 @@
package me.lake.librestreaming.rtmp;
public class RtmpClient {
static {
System.loadLibrary("resrtmp");
}
/**
* @param url           the RTMP url to connect to
* @param isPublishMode true to publish, false to play
* @return rtmpPointer, a pointer to the native rtmp struct; 0 on failure
*/
public static native long open(String url, boolean isPublishMode);
public static native int read(long rtmpPointer, byte[] data, int offset, int size);
public static native int write(long rtmpPointer, byte[] data, int size, int type, int ts);
public static native int close(long rtmpPointer);
public static native String getIpAddr(long rtmpPointer);
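// Lifecycle sketch (mirrors RESRtmpSender.WorkHandler in this commit; the url
// is a placeholder and `flv` stands for a prepared RESFlvData):
//   long p = RtmpClient.open("rtmp://example.com/live/stream", true);
//   if (p != 0) {
//       byte[] meta = new FLvMetaData(coreParameters).getMetaData();
//       RtmpClient.write(p, meta, meta.length, RESFlvData.FLV_RTMP_PACKET_TYPE_INFO, 0);
//       RtmpClient.write(p, flv.byteBuffer, flv.byteBuffer.length, flv.flvTagType, flv.dts);
//       RtmpClient.close(p);
//   }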
}

View File

@ -0,0 +1,19 @@
package me.lake.librestreaming.tools;
import android.graphics.ImageFormat;
import android.media.MediaCodecInfo;
public class BuffSizeCalculator {
public static int calculator(int width, int height, int colorFormat) {
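// All four supported formats are YUV 4:2:0 variants, hence 3/2 bytes per pixel.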
switch (colorFormat) {
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
case ImageFormat.NV21:
case ImageFormat.YV12:
return width * height * 3 / 2;
default:
return -1;
}
}
}

Some files were not shown because too many files have changed in this diff.