commit 8f9ae202db
2022-03-11 08:43:32 +08:00
148 changed files with 23318 additions and 0 deletions

View File

@@ -0,0 +1,34 @@
package me.lake.librestreaming.client;
import android.os.Handler;
import android.os.Looper;
import java.util.concurrent.Executor;
public class CallbackDelivery {
static private CallbackDelivery instance;
private final Executor mCallbackPoster;
private final Handler handler = new Handler(Looper.getMainLooper());
public static CallbackDelivery i() {
return instance == null ? instance = new CallbackDelivery() : instance;
}
private CallbackDelivery() {
mCallbackPoster = new Executor() {
@Override
public void execute(Runnable command) {
handler.post(command);
}
};
}
public void post(Runnable runnable) {
mCallbackPoster.execute(runnable);
}
public void postDelayed(Runnable runnable, long time) {
handler.postDelayed(runnable,time);
}
}
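
A minimal usage sketch (assumed caller code, not part of this commit): CallbackDelivery marshals work onto the Android main thread through the Handler bound to the main Looper.

// `uiWork` is a hypothetical Runnable supplied by the app; it will run on the UI thread.
Runnable uiWork = () -> { /* update views, notify listeners, ... */ };
CallbackDelivery.i().post(uiWork);
// or with a delay in milliseconds:
CallbackDelivery.i().postDelayed(uiWork, 500);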

View File

@@ -0,0 +1,6 @@
package me.lake.librestreaming.client;
public class Constants {
public static String VERSION = "0.1.0";
}

View File

@@ -0,0 +1,134 @@
package me.lake.librestreaming.client;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import me.lake.librestreaming.core.RESSoftAudioCore;
import me.lake.librestreaming.filter.softaudiofilter.BaseSoftAudioFilter;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.tools.LogTools;
public class RESAudioClient {
RESCoreParameters resCoreParameters;
private final Object syncOp = new Object();
private AudioRecordThread audioRecordThread;
private AudioRecord audioRecord;
private byte[] audioBuffer;
private RESSoftAudioCore softAudioCore;
public RESAudioClient(RESCoreParameters parameters) {
resCoreParameters = parameters;
}
public boolean prepare(RESConfig resConfig) {
synchronized (syncOp) {
resCoreParameters.audioBufferQueueNum = 5;
softAudioCore = new RESSoftAudioCore(resCoreParameters);
if (!softAudioCore.prepare(resConfig)) {
LogTools.e("RESAudioClient,prepare");
return false;
}
resCoreParameters.audioRecoderFormat = AudioFormat.ENCODING_PCM_16BIT;
resCoreParameters.audioRecoderChannelConfig = AudioFormat.CHANNEL_IN_MONO;
resCoreParameters.audioRecoderSliceSize = resCoreParameters.mediacodecAACSampleRate / 10;
resCoreParameters.audioRecoderBufferSize = resCoreParameters.audioRecoderSliceSize * 2;
resCoreParameters.audioRecoderSource = MediaRecorder.AudioSource.DEFAULT;
resCoreParameters.audioRecoderSampleRate = resCoreParameters.mediacodecAACSampleRate;
prepareAudio();
return true;
}
}
public boolean start(RESFlvDataCollecter flvDataCollecter) {
synchronized (syncOp) {
softAudioCore.start(flvDataCollecter);
audioRecord.startRecording();
audioRecordThread = new AudioRecordThread();
audioRecordThread.start();
LogTools.d("RESAudioClient,start()");
return true;
}
}
public boolean stop() {
synchronized (syncOp) {
if(audioRecordThread != null) {
audioRecordThread.quit();
try {
audioRecordThread.join();
} catch (InterruptedException ignored) {
}
softAudioCore.stop();
audioRecordThread = null;
audioRecord.stop();
return true;
}
return true;
}
}
public boolean destroy() {
synchronized (syncOp) {
audioRecord.release();
return true;
}
}
public void setSoftAudioFilter(BaseSoftAudioFilter baseSoftAudioFilter) {
softAudioCore.setAudioFilter(baseSoftAudioFilter);
}
public BaseSoftAudioFilter acquireSoftAudioFilter() {
return softAudioCore.acquireAudioFilter();
}
public void releaseSoftAudioFilter() {
softAudioCore.releaseAudioFilter();
}
private boolean prepareAudio() {
int minBufferSize = AudioRecord.getMinBufferSize(resCoreParameters.audioRecoderSampleRate,
resCoreParameters.audioRecoderChannelConfig,
resCoreParameters.audioRecoderFormat);
audioRecord = new AudioRecord(resCoreParameters.audioRecoderSource,
resCoreParameters.audioRecoderSampleRate,
resCoreParameters.audioRecoderChannelConfig,
resCoreParameters.audioRecoderFormat,
minBufferSize * 5);
audioBuffer = new byte[resCoreParameters.audioRecoderBufferSize];
if (AudioRecord.STATE_INITIALIZED != audioRecord.getState()) {
LogTools.e("audioRecord.getState()!=AudioRecord.STATE_INITIALIZED!");
return false;
}
if (AudioRecord.SUCCESS != audioRecord.setPositionNotificationPeriod(resCoreParameters.audioRecoderSliceSize)) {
LogTools.e("AudioRecord.SUCCESS != audioRecord.setPositionNotificationPeriod(" + resCoreParameters.audioRecoderSliceSize + ")");
return false;
}
return true;
}
class AudioRecordThread extends Thread {
private boolean isRunning = true;
AudioRecordThread() {
isRunning = true;
}
public void quit() {
isRunning = false;
}
@Override
public void run() {
LogTools.d("AudioRecordThread,tid=" + Thread.currentThread().getId());
while (isRunning) {
int size = audioRecord.read(audioBuffer, 0, audioBuffer.length);
if (isRunning && softAudioCore != null && size > 0) {
softAudioCore.queueAudio(audioBuffer);
}
}
}
}
}
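
For reference, a small sketch (not part of the library) of how the slice and buffer sizes chosen in prepare() work out, assuming mediacodecAACSampleRate is 44100 Hz: each slice is 100 ms of 16-bit mono PCM, and the record buffer holds exactly one slice.

int sampleRate = 44100;           // assumed value of resCoreParameters.mediacodecAACSampleRate
int sliceSize = sampleRate / 10;  // 4410 samples, i.e. 100 ms of audio
int bufferSize = sliceSize * 2;   // 8820 bytes: mono 16-bit samples are 2 bytes each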

View File

@@ -0,0 +1,541 @@
package me.lake.librestreaming.client;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.os.Build;
import android.widget.Toast;
import java.lang.ref.WeakReference;
import me.lake.librestreaming.core.listener.RESConnectionListener;
import me.lake.librestreaming.core.listener.RESScreenShotListener;
import me.lake.librestreaming.core.listener.RESVideoChangeListener;
import me.lake.librestreaming.encoder.MediaVideoEncoder;
import me.lake.librestreaming.filter.hardvideofilter.BaseHardVideoFilter;
import me.lake.librestreaming.filter.softaudiofilter.BaseSoftAudioFilter;
import me.lake.librestreaming.filter.softvideofilter.BaseSoftVideoFilter;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.Size;
import me.lake.librestreaming.rtmp.RESFlvData;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.rtmp.RESRtmpSender;
import me.lake.librestreaming.tools.LogTools;
public class RESClient {
public RESVideoClient videoClient;
private RESAudioClient audioClient;
private final Object SyncOp;
//parameters
RESCoreParameters coreParameters;
private RESRtmpSender rtmpSender;
private RESFlvDataCollecter dataCollecter;
//whether we are currently streaming
public boolean isStreaming = false;
private WeakReference<Activity> mActivity;
public RESClient() {
SyncOp = new Object();
coreParameters = new RESCoreParameters();
CallbackDelivery.i();
}
public void setContext(Context context){
if(context instanceof Activity){
this.mActivity = new WeakReference<Activity>((Activity) context);
}
}
/**
* prepare to stream
*
* @param resConfig config
* @return true if prepare success
*/
public boolean prepare(RESConfig resConfig) {
synchronized (SyncOp) {
checkDirection(resConfig);
coreParameters.filterMode = resConfig.getFilterMode();
coreParameters.rtmpAddr = resConfig.getRtmpAddr();
coreParameters.printDetailMsg = resConfig.isPrintDetailMsg();
coreParameters.senderQueueLength = 200;//150
videoClient = new RESVideoClient(coreParameters);
videoClient.setActivity(mActivity.get());
audioClient = new RESAudioClient(coreParameters);
if (!videoClient.prepare(resConfig)) {
LogTools.d("!!!!!videoClient.prepare()failed");
LogTools.d(coreParameters.toString());
return false;
}
if (!audioClient.prepare(resConfig)) {
LogTools.d("!!!!!audioClient.prepare()failed");
LogTools.d(coreParameters.toString());
return false;
}
rtmpSender = new RESRtmpSender();
rtmpSender.prepare(coreParameters);
dataCollecter = new RESFlvDataCollecter() {
@Override
public void collect(RESFlvData flvData, int type) {
if(rtmpSender != null){
rtmpSender.feed(flvData, type);
}
}
};
coreParameters.done = true;
LogTools.d("===INFO===coreParametersReady:");
LogTools.d(coreParameters.toString());
return true;
}
}
/**
* start streaming
*/
public void startStreaming(String rtmpAddr) {
isStreaming = true;
synchronized (SyncOp) {
try {
videoClient.startStreaming(dataCollecter);
rtmpSender.start(rtmpAddr == null ? coreParameters.rtmpAddr : rtmpAddr);
audioClient.start(dataCollecter);
LogTools.d("RESClient,startStreaming()");
}catch (Exception e){
if(mActivity.get() !=null){
Toast.makeText(mActivity.get(),"Permission may be missing",Toast.LENGTH_LONG).show();
mActivity.get().finish();
}
}
}
}
/**
* start streaming
*/
public void startStreaming() {
isStreaming = true;
synchronized (SyncOp) {
videoClient.startStreaming(dataCollecter);
rtmpSender.start(coreParameters.rtmpAddr);
audioClient.start(dataCollecter);
LogTools.d("RESClient,startStreaming()");
}
}
/**
* stop streaming
*/
public void stopStreaming() {
isStreaming = false;
synchronized (SyncOp) {
videoClient.stopStreaming();
audioClient.stop();
rtmpSender.stop();
LogTools.d("RESClient,stopStreaming()");
}
}
/**
* clean up
*/
public void destroy() {
synchronized (SyncOp) {
rtmpSender.destroy();
videoClient.destroy();
audioClient.destroy();
rtmpSender = null;
videoClient = null;
audioClient = null;
LogTools.d("RESClient,destroy()");
}
}
/**
* call it AFTER {@link #prepare(RESConfig)}
*
* @param surfaceTexture surface texture to render the preview to
*/
public void startPreview(SurfaceTexture surfaceTexture, int visualWidth, int visualHeight) {
if(videoClient != null){
videoClient.startPreview(surfaceTexture, visualWidth, visualHeight);
}
LogTools.d("RESClient,startPreview()");
}
public void updatePreview(int visualWidth, int visualHeight) {
if(videoClient != null){
videoClient.updatePreview(visualWidth, visualHeight);
}
LogTools.d("RESClient,updatePreview()");
}
public Camera getCamera(){
return videoClient.getCamera();
}
public int getCameraId(){
return videoClient.currentCameraIndex;
}
/**
*
* @param releaseTexture true if you won't reuse this surfaceTexture later
*/
public void stopPreview(boolean releaseTexture) {
if(videoClient != null){
videoClient.stopPreview(releaseTexture);
}
LogTools.d("RESClient,stopPreview()");
}
/**
* switch between the front and back camera while running.<br/>
*/
public boolean swapCamera() {
synchronized (SyncOp) {
LogTools.d("RESClient,swapCamera()");
return videoClient.swapCamera();
}
}
/**
* only for soft filter mode.<br/>
* use it to update filter property.<br/>
* call it with {@link #releaseSoftVideoFilter()}<br/>
* make sure to release it within 3 ms
*
* @return the video filter in use
*/
public BaseSoftVideoFilter acquireSoftVideoFilter() {
return videoClient.acquireSoftVideoFilter();
}
/**
* only for soft filter mode.<br/>
* call it with {@link #acquireSoftVideoFilter()}
*/
public void releaseSoftVideoFilter() {
videoClient.releaseSoftVideoFilter();
}
/**
* get the real video size; call after prepare()
*
* @return the real video size
*/
public Size getVideoSize() {
return new Size(coreParameters.videoWidth, coreParameters.videoHeight);
}
/**
* get the RTMP server IP address; call after the connection has succeeded.
*
* @return the server IP address
*/
public String getServerIpAddr() {
synchronized (SyncOp) {
return rtmpSender == null ? null : rtmpSender.getServerIpAddr();
}
}
/**
* get the real draw frame rate of the screen
*
* @return draw frame rate in frames per second
*/
public float getDrawFrameRate() {
synchronized (SyncOp) {
return videoClient == null ? 0 : videoClient.getDrawFrameRate();
}
}
/**
* get the frame rate of video frames sent over RTMP
*
* @return send frame rate in frames per second
*/
public float getSendFrameRate() {
synchronized (SyncOp) {
return rtmpSender == null ? 0 : rtmpSender.getSendFrameRate();
}
}
/**
* get the free fraction of the send buffer.
* returns ~0.0 when the network speed is insufficient or the network is blocked.
* @return free fraction of the send buffer
*/
public float getSendBufferFreePercent() {
synchronized (SyncOp) {
return rtmpSender == null ? 0 : rtmpSender.getSendBufferFreePercent();
}
}
/**
* only for soft filter mode.<br/>
* set videofilter.<br/>
* can be called repeatedly.<br/>
* do NOT call it between {@link #acquireSoftVideoFilter()} & {@link #releaseSoftVideoFilter()}
*
* @param baseSoftVideoFilter videofilter to apply
*/
public void setSoftVideoFilter(BaseSoftVideoFilter baseSoftVideoFilter) {
videoClient.setSoftVideoFilter(baseSoftVideoFilter);
}
/**
* only for hard filter mode.<br/>
* use it to update filter property.<br/>
* call it with {@link #releaseHardVideoFilter()}<br/>
* make sure to release it within 3 ms
*
* @return the videofilter in use
*/
public BaseHardVideoFilter acquireHardVideoFilter() {
return videoClient.acquireHardVideoFilter();
}
/**
* only for hard filter mode.<br/>
* call it with {@link #acquireHardVideoFilter()}
*/
public void releaseHardVideoFilter() {
videoClient.releaseHardVideoFilter();
}
/**
* only for hard filter mode.<br/>
* set videofilter.<br/>
* can be called repeatedly.<br/>
* do NOT call it between {@link #acquireHardVideoFilter()} & {@link #releaseHardVideoFilter()}
*
* @param baseHardVideoFilter videofilter to apply
*/
public void setHardVideoFilter(BaseHardVideoFilter baseHardVideoFilter) {
videoClient.setHardVideoFilter(baseHardVideoFilter);
}
/**
* set audiofilter.<br/>
* can be called repeatedly.<br/>
* do NOT call it between {@link #acquireSoftAudioFilter()} & {@link #releaseSoftAudioFilter()}
*
* @param baseSoftAudioFilter audiofilter to apply
*/
public void setSoftAudioFilter(BaseSoftAudioFilter baseSoftAudioFilter) {
audioClient.setSoftAudioFilter(baseSoftAudioFilter);
}
/**
* use it to update filter property.<br/>
* call it with {@link #releaseSoftAudioFilter()}<br/>
* make sure to release it within 3 ms
*
* @return the audiofilter in use
*/
public BaseSoftAudioFilter acquireSoftAudioFilter() {
return audioClient.acquireSoftAudioFilter();
}
/**
* call it with {@link #acquireSoftAudioFilter()}
*/
public void releaseSoftAudioFilter() {
audioClient.releaseSoftAudioFilter();
}
/**
* get the real combined video & audio send speed
*
* @return speed in B/s
*/
public int getAVSpeed() {
synchronized (SyncOp) {
return rtmpSender == null ? 0 : rtmpSender.getTotalSpeed();
}
}
/**
* call it AFTER {@link #prepare(RESConfig)}
*
* @param connectionListener listener for RTMP connection events
*/
public void setConnectionListener(RESConnectionListener connectionListener) {
if(rtmpSender != null) {
rtmpSender.setConnectionListener(connectionListener);
}
}
/**
* listener for video size change
* @param videoChangeListener listener notified when the video size changes
*/
public void setVideoChangeListener(RESVideoChangeListener videoChangeListener) {
if(videoClient != null){
videoClient.setVideoChangeListener(videoChangeListener);
}
}
/**
* get the param of video,audio,mediacodec
*
* @return info
*/
public String getConfigInfo() {
return coreParameters.toString();
}
/**
* set zoom by percent [0.0f,1.0f]
*
* @param targetPercent zoom percent in [0.0f, 1.0f]
*/
public boolean setZoomByPercent(float targetPercent) {
return videoClient.setZoomByPercent(targetPercent);
}
/**
* toggle flash light
*
* @return true if operation success
*/
public boolean toggleFlashLight() {
return videoClient.toggleFlashLight();
}
public void takeScreenShot(RESScreenShotListener listener) {
videoClient.takeScreenShot(listener);
}
/**
* Change video bitrate on the fly<br/>
* call between {@link #startStreaming()} & {@link #stopStreaming()}
* @param bitrate target bitrate bits/sec
*/
@TargetApi(Build.VERSION_CODES.KITKAT)
public void reSetVideoBitrate(int bitrate) {
videoClient.reSetVideoBitrate(bitrate);
}
/**
* get current bitrate
* @return current bitrate bits/sec
*/
public int getVideoBitrate() {
return videoClient.getVideoBitrate();
}
/**
* update fps on the fly.
* @param fps target frame rate in frames per second
*/
public void reSetVideoFPS(int fps) {
videoClient.reSetVideoFPS(fps);
}
/**
* only work with hard mode.
* reset video size on the fly.
* may restart camera.
* will restart mediacodec.
* will not interrupt streaming
* @param targetVideoSize
*/
public void reSetVideoSize(Size targetVideoSize) {
if (targetVideoSize == null) {
return;
}
if (coreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
throw new IllegalArgumentException("soft mode doesn't support reSetVideoSize");
}else {
videoClient.reSetVideoSize(targetVideoSize);
}
}
public RESRtmpSender getRtmpSender(){
return rtmpSender;
}
public String getVertion() {
return Constants.VERSION;
}
/**
* =====================PRIVATE=================
**/
private void checkDirection(RESConfig resConfig) {
int frontFlag = resConfig.getFrontCameraDirectionMode();
int backFlag = resConfig.getBackCameraDirectionMode();
int fbit = 0;
int bbit = 0;
if ((frontFlag >> 4) == 0) {
frontFlag |= RESCoreParameters.FLAG_DIRECTION_ROATATION_0;
}
if ((backFlag >> 4) == 0) {
backFlag |= RESCoreParameters.FLAG_DIRECTION_ROATATION_0;
}
for (int i = 4; i <= 8; ++i) {
if (((frontFlag >> i) & 0x1) == 1) {
fbit++;
}
if (((backFlag >> i) & 0x1) == 1) {
bbit++;
}
}
if (fbit != 1 || bbit != 1) {
throw new RuntimeException("invalid direction rotation flag:frontFlagNum=" + fbit + ",backFlagNum=" + bbit);
}
if (((frontFlag & RESCoreParameters.FLAG_DIRECTION_ROATATION_0) != 0) || ((frontFlag & RESCoreParameters.FLAG_DIRECTION_ROATATION_180) != 0)) {
fbit = 0;
} else {
fbit = 1;
}
if (((backFlag & RESCoreParameters.FLAG_DIRECTION_ROATATION_0) != 0) || ((backFlag & RESCoreParameters.FLAG_DIRECTION_ROATATION_180) != 0)) {
bbit = 0;
} else {
bbit = 1;
}
if (bbit != fbit) {
if (bbit == 0) {
throw new RuntimeException("invalid direction rotation flag:back camera is landscape but front camera is portrait");
} else {
throw new RuntimeException("invalid direction rotation flag:back camera is portrait but front camera is landscape");
}
}
if (fbit == 1) {
coreParameters.isPortrait = true;
} else {
coreParameters.isPortrait = false;
}
coreParameters.backCameraDirectionMode = backFlag;
coreParameters.frontCameraDirectionMode = frontFlag;
System.out.println("coreParameters.backCameraDirectionMode = " + coreParameters.backCameraDirectionMode);
}
public void setVideoEncoder(final MediaVideoEncoder encoder) {
videoClient.setVideoEncoder(encoder);
}
public void setMirror(boolean isEnableMirror,boolean isEnablePreviewMirror,boolean isEnableStreamMirror) {
videoClient.setMirror(isEnableMirror,isEnablePreviewMirror,isEnableStreamMirror);
}
public void setNeedResetEglContext(boolean bol){
videoClient.setNeedResetEglContext(bol);
}
public void setCreamAr(){
videoClient.setCameraArea();
}
static {
System.loadLibrary("restreaming");
}
}
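
A minimal end-to-end usage sketch (assumed caller code; the RTMP address, SurfaceTexture and sizes are placeholders), following the documented order prepare() -> startPreview() -> startStreaming() -> stopStreaming() -> stopPreview() -> destroy():

RESClient client = new RESClient();
client.setContext(activity);                              // pass an Activity so camera orientation and error toasts work
RESConfig config = RESConfig.obtain();                    // assumed factory method; set filter mode, target size, fps, etc.
config.setRtmpAddr("rtmp://example.com/live/streamkey");  // placeholder address
if (!client.prepare(config)) {
    return;                                               // inspect client.getConfigInfo() and the logs on failure
}
client.startPreview(surfaceTexture, visualWidth, visualHeight);
client.startStreaming();
// ... streaming ...
client.stopStreaming();
client.stopPreview(true);
client.destroy();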

View File

@@ -0,0 +1,525 @@
package me.lake.librestreaming.client;
import android.app.Activity;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import me.lake.librestreaming.core.CameraHelper;
import me.lake.librestreaming.core.RESHardVideoCore;
import me.lake.librestreaming.core.RESSoftVideoCore;
import me.lake.librestreaming.core.RESVideoCore;
import me.lake.librestreaming.core.listener.RESScreenShotListener;
import me.lake.librestreaming.core.listener.RESVideoChangeListener;
import me.lake.librestreaming.encoder.MediaVideoEncoder;
import me.lake.librestreaming.filter.hardvideofilter.BaseHardVideoFilter;
import me.lake.librestreaming.filter.softvideofilter.BaseSoftVideoFilter;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.Size;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.tools.BuffSizeCalculator;
import me.lake.librestreaming.tools.CameraUtil;
import me.lake.librestreaming.tools.LogTools;
public class RESVideoClient {
RESCoreParameters resCoreParameters;
private final Object syncOp = new Object();
private Camera camera;
public SurfaceTexture camTexture;
private int cameraNum;
public int currentCameraIndex;
private RESVideoCore videoCore;
private boolean isStreaming;
private boolean isPreviewing;
public RESVideoClient(RESCoreParameters parameters) {
resCoreParameters = parameters;
cameraNum = Camera.getNumberOfCameras();
currentCameraIndex = Camera.CameraInfo.CAMERA_FACING_BACK;
isStreaming = false;
isPreviewing = false;
}
public boolean prepare(RESConfig resConfig) {
synchronized (syncOp) {
if ((cameraNum - 1) >= resConfig.getDefaultCamera()) {
currentCameraIndex = resConfig.getDefaultCamera();
}
if (null == (camera = createCamera(currentCameraIndex))) {
LogTools.e("can not open camera");
return false;
}
Camera.Parameters parameters = camera.getParameters();
CameraHelper.selectCameraPreviewWH(parameters, resCoreParameters, resConfig.getTargetPreviewSize());
CameraHelper.selectCameraFpsRange(parameters, resCoreParameters);
if (resConfig.getVideoFPS() > resCoreParameters.previewMaxFps / 1000) {
resCoreParameters.videoFPS = resCoreParameters.previewMaxFps / 1000;
} else {
resCoreParameters.videoFPS = resConfig.getVideoFPS();
}
resoveResolution(resCoreParameters, resConfig.getTargetVideoSize());
if (!CameraHelper.selectCameraColorFormat(parameters, resCoreParameters)) {
LogTools.e("CameraHelper.selectCameraColorFormat,Failed");
resCoreParameters.dump();
return false;
}
if (!CameraHelper.configCamera(camera, resCoreParameters)) {
LogTools.e("CameraHelper.configCamera,Failed");
resCoreParameters.dump();
return false;
}
switch (resCoreParameters.filterMode) {
case RESCoreParameters.FILTER_MODE_SOFT:
videoCore = new RESSoftVideoCore(resCoreParameters);
break;
case RESCoreParameters.FILTER_MODE_HARD:
videoCore = new RESHardVideoCore(resCoreParameters);
break;
}
if (!videoCore.prepare(resConfig)) {
return false;
}
videoCore.setCurrentCamera(currentCameraIndex);
prepareVideo();
return true;
}
}
public Camera getCamera(){
return camera;
}
private Camera createCamera(int cameraId) {
try {
camera = Camera.open(cameraId);
CameraUtil.setCameraDisplayOrientation(activity,cameraId,camera);
} catch (SecurityException e) {
LogTools.trace("no permission", e);
return null;
} catch (Exception e) {
LogTools.trace("camera.open()failed", e);
return null;
}
return camera;
}
private boolean prepareVideo() {
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
camera.addCallbackBuffer(new byte[resCoreParameters.previewBufferSize]);
camera.addCallbackBuffer(new byte[resCoreParameters.previewBufferSize]);
}
return true;
}
private boolean startVideo() {
camTexture = new SurfaceTexture(RESVideoCore.OVERWATCH_TEXTURE_ID);
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized (syncOp) {
if (videoCore != null && data != null) {
((RESSoftVideoCore) videoCore).queueVideo(data);
}
camera.addCallbackBuffer(data);
}
}
});
} else {
camTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
synchronized (syncOp) {
if (videoCore != null) {
((RESHardVideoCore) videoCore).onFrameAvailable();
}
}
}
});
}
try {
camera.setPreviewTexture(camTexture);
} catch (IOException e) {
LogTools.trace(e);
camera.release();
return false;
}
camera.startPreview();
return true;
}
public boolean startPreview(SurfaceTexture surfaceTexture, int visualWidth, int visualHeight) {
synchronized (syncOp) {
if (!isStreaming && !isPreviewing) {
if (!startVideo()) {
resCoreParameters.dump();
LogTools.e("RESVideoClient,start(),failed");
return false;
}
videoCore.updateCamTexture(camTexture);
}
videoCore.startPreview(surfaceTexture, visualWidth, visualHeight);
isPreviewing = true;
return true;
}
}
public void updatePreview(int visualWidth, int visualHeight) {
videoCore.updatePreview(visualWidth, visualHeight);
}
public boolean stopPreview(boolean releaseTexture) {
synchronized (syncOp) {
if (isPreviewing) {
videoCore.stopPreview(releaseTexture);
if (!isStreaming) {
camera.stopPreview();
videoCore.updateCamTexture(null);
camTexture.release();
}
}
isPreviewing = false;
return true;
}
}
public boolean startStreaming(RESFlvDataCollecter flvDataCollecter) {
synchronized (syncOp) {
if (!isStreaming && !isPreviewing) {
if (!startVideo()) {
resCoreParameters.dump();
LogTools.e("RESVideoClient,start(),failed");
return false;
}
videoCore.updateCamTexture(camTexture);
}
videoCore.startStreaming(flvDataCollecter);
isStreaming = true;
return true;
}
}
public boolean stopStreaming() {
synchronized (syncOp) {
if (isStreaming) {
videoCore.stopStreaming();
if (!isPreviewing) {
camera.stopPreview();
videoCore.updateCamTexture(null);
camTexture.release();
}
}
isStreaming = false;
return true;
}
}
public boolean destroy() {
synchronized (syncOp) {
camera.release();
videoCore.destroy();
videoCore = null;
camera = null;
return true;
}
}
public boolean swapCamera() {
synchronized (syncOp) {
LogTools.d("RESClient,swapCamera()");
camera.stopPreview();
camera.release();
camera = null;
if (null == (camera = createCamera(currentCameraIndex = (currentCameraIndex + 1) % cameraNum))) {
LogTools.e("can not swap camera");
return false;
}
videoCore.setCurrentCamera(currentCameraIndex);
CameraHelper.selectCameraFpsRange(camera.getParameters(), resCoreParameters);
if (!CameraHelper.configCamera(camera, resCoreParameters)) {
camera.release();
return false;
}
prepareVideo();
camTexture.release();
videoCore.updateCamTexture(null);
startVideo();
videoCore.updateCamTexture(camTexture);
return true;
}
}
public boolean toggleFlashLight() {
synchronized (syncOp) {
try {
Camera.Parameters parameters = camera.getParameters();
List<String> flashModes = parameters.getSupportedFlashModes();
String flashMode = parameters.getFlashMode();
if (!Camera.Parameters.FLASH_MODE_TORCH.equals(flashMode)) {
if (flashModes.contains(Camera.Parameters.FLASH_MODE_TORCH)) {
parameters.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
camera.setParameters(parameters);
return true;
}
} else if (!Camera.Parameters.FLASH_MODE_OFF.equals(flashMode)) {
if (flashModes.contains(Camera.Parameters.FLASH_MODE_OFF)) {
parameters.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
camera.setParameters(parameters);
return true;
}
}
} catch (Exception e) {
LogTools.d("toggleFlashLight,failed" + e.getMessage());
return false;
}
return false;
}
}
public boolean setZoomByPercent(float targetPercent) {
synchronized (syncOp) {
targetPercent = Math.min(Math.max(0f, targetPercent), 1f);
Camera.Parameters p = camera.getParameters();
p.setZoom((int) (p.getMaxZoom() * targetPercent));
camera.setParameters(p);
return true;
}
}
public void reSetVideoBitrate(int bitrate) {
synchronized (syncOp) {
if (videoCore != null) {
videoCore.reSetVideoBitrate(bitrate);
}
}
}
public int getVideoBitrate() {
synchronized (syncOp) {
if (videoCore != null) {
return videoCore.getVideoBitrate();
} else {
return 0;
}
}
}
public void reSetVideoFPS(int fps) {
synchronized (syncOp) {
int targetFps;
if (fps > resCoreParameters.previewMaxFps / 1000) {
targetFps = resCoreParameters.previewMaxFps / 1000;
} else {
targetFps = fps;
}
if (videoCore != null) {
videoCore.reSetVideoFPS(targetFps);
}
}
}
public boolean reSetVideoSize(Size targetVideoSize) {
synchronized (syncOp) {
RESCoreParameters newParameters = new RESCoreParameters();
newParameters.isPortrait = resCoreParameters.isPortrait;
newParameters.filterMode = resCoreParameters.filterMode;
Camera.Parameters parameters = camera.getParameters();
CameraHelper.selectCameraPreviewWH(parameters, newParameters, targetVideoSize);
resoveResolution(newParameters, targetVideoSize);
boolean needRestartCamera = (newParameters.previewVideoHeight != resCoreParameters.previewVideoHeight
|| newParameters.previewVideoWidth != resCoreParameters.previewVideoWidth);
if (needRestartCamera) {
newParameters.previewBufferSize = BuffSizeCalculator.calculator(newParameters.previewVideoWidth,
newParameters.previewVideoHeight, resCoreParameters.previewColorFormat);
resCoreParameters.previewVideoWidth = newParameters.previewVideoWidth;
resCoreParameters.previewVideoHeight = newParameters.previewVideoHeight;
resCoreParameters.previewBufferSize = newParameters.previewBufferSize;
if ((isPreviewing || isStreaming)) {
LogTools.d("RESClient,reSetVideoSize.restartCamera");
camera.stopPreview();
camera.release();
camera = null;
if (null == (camera = createCamera(currentCameraIndex))) {
LogTools.e("can not createCamera camera");
return false;
}
if (!CameraHelper.configCamera(camera, resCoreParameters)) {
camera.release();
return false;
}
prepareVideo();
videoCore.updateCamTexture(null);
camTexture.release();
startVideo();
videoCore.updateCamTexture(camTexture);
}
}
videoCore.reSetVideoSize(newParameters);
return true;
}
}
public BaseSoftVideoFilter acquireSoftVideoFilter() {
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
return ((RESSoftVideoCore) videoCore).acquireVideoFilter();
}
return null;
}
public void releaseSoftVideoFilter() {
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
((RESSoftVideoCore) videoCore).releaseVideoFilter();
}
}
public void setSoftVideoFilter(BaseSoftVideoFilter baseSoftVideoFilter) {
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
((RESSoftVideoCore) videoCore).setVideoFilter(baseSoftVideoFilter);
}
}
public BaseHardVideoFilter acquireHardVideoFilter() {
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_HARD) {
return ((RESHardVideoCore) videoCore).acquireVideoFilter();
}
return null;
}
public void releaseHardVideoFilter() {
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_HARD) {
((RESHardVideoCore) videoCore).releaseVideoFilter();
}
}
public void setHardVideoFilter(BaseHardVideoFilter baseHardVideoFilter) {
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_HARD) {
System.err.println("videoCore is null ?"+videoCore);
((RESHardVideoCore) videoCore).setVideoFilter(baseHardVideoFilter);
}
}
public void takeScreenShot(RESScreenShotListener listener) {
synchronized (syncOp) {
if (videoCore != null) {
videoCore.takeScreenShot(listener);
}
}
}
public void setVideoChangeListener(RESVideoChangeListener listener) {
synchronized (syncOp) {
if (videoCore != null) {
videoCore.setVideoChangeListener(listener);
}
}
}
public float getDrawFrameRate() {
synchronized (syncOp) {
return videoCore == null ? 0 : videoCore.getDrawFrameRate();
}
}
private void resoveResolution(RESCoreParameters resCoreParameters, Size targetVideoSize) {
if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
if (resCoreParameters.isPortrait) {
resCoreParameters.videoHeight = resCoreParameters.previewVideoWidth;
resCoreParameters.videoWidth = resCoreParameters.previewVideoHeight;
} else {
resCoreParameters.videoWidth = resCoreParameters.previewVideoWidth;
resCoreParameters.videoHeight = resCoreParameters.previewVideoHeight;
}
} else {
float pw, ph, vw, vh;
if (resCoreParameters.isPortrait) {
resCoreParameters.videoHeight = targetVideoSize.getWidth();
resCoreParameters.videoWidth = targetVideoSize.getHeight();
pw = resCoreParameters.previewVideoHeight;
ph = resCoreParameters.previewVideoWidth;
} else {
resCoreParameters.videoWidth = targetVideoSize.getWidth();
resCoreParameters.videoHeight = targetVideoSize.getHeight();
pw = resCoreParameters.previewVideoWidth;
ph = resCoreParameters.previewVideoHeight;
}
vw = resCoreParameters.videoWidth;
vh = resCoreParameters.videoHeight;
float pr = ph / pw, vr = vh / vw;
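//worked example (assumed numbers): a landscape 1280x720 preview with a 960x540 target video gives
//pr = 720/1280 = 0.5625 and vr = 540/960 = 0.5625, so cropRatio = 0 (aspect ratios already match);
//a 960x720 target gives vr = 0.75 > pr, so cropRatio = -(1 - 0.5625/0.75)/2 = -0.125.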
if (pr == vr) {
resCoreParameters.cropRatio = 0.0f;
} else if (pr > vr) {
resCoreParameters.cropRatio = (1.0f - vr / pr) / 2.0f;
} else {
resCoreParameters.cropRatio = -(1.0f - pr / vr) / 2.0f;
}
}
}
public void setVideoEncoder(final MediaVideoEncoder encoder) {
videoCore.setVideoEncoder(encoder);
}
public void setMirror(boolean isEnableMirror,boolean isEnablePreviewMirror,boolean isEnableStreamMirror) {
videoCore.setMirror(isEnableMirror,isEnablePreviewMirror,isEnableStreamMirror);
}
public void setNeedResetEglContext(boolean bol){
videoCore.setNeedResetEglContext(bol);
}
public void setActivity(Activity activity) {
this.activity=activity;
}
private Activity activity;
/**
* Convert a tap position into a camera focus area.
* The focus coordinate range is (-1000, -1000, 1000, 1000).
* x,y is the tap position; width,height are the SurfaceView dimensions; coefficient scales the area size.
*/
private static Rect calculateTapArea(float x, float y, int width, int height, float coefficient) {
float focusAreaSize = 200;
//as this code shows, coefficient only serves to scale areaSize.
int areaSize = (int) (focusAreaSize * coefficient);
int surfaceWidth = width;
int surfaceHeight = height;
//why *2000: the SurfaceView coordinates must be mapped into the (-1000, -1000, 1000, 1000) range, so the view center becomes (0,0);
//x/surfaceWidth gives the fraction of the width, multiplying by 2000 maps it to [0,2000], and subtracting 1000 shifts it into [-1000,1000].
//with the converted x,y and areaSize we can then build the focus rectangle.
int centerX = (int) (x / surfaceHeight * 2000 - 1000);
int centerY = (int) (y / surfaceWidth * 2000 - 1000);
int left = clamp(centerX - (areaSize / 2), -1000, 1000);
int top = clamp(centerY - (areaSize / 2), -1000, 1000);
int right = clamp(left + areaSize, -1000, 1000);
int bottom = clamp(top + areaSize, -1000, 1000);
return new Rect(left, top, right, bottom);
}
//clamp: not greater than max, not less than min
private static int clamp(int x, int min, int max) {
if (x > max) {
return max;
}
if (x < min) {
return min;
}
return x;
}
public void setCameraArea(){
System.out.println("设置对焦");
List<Camera.Area> focusAreas = new ArrayList<>();
focusAreas.add(new Camera.Area(calculateTapArea(0,0,5,5,1.0f), 800));
Camera.Parameters parameters=camera.getParameters();
parameters.setFocusAreas(focusAreas);
camera.setParameters(parameters);
camera.autoFocus(new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean b, Camera camera) {
System.out.println("对焦完成");
}
});
}
}
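
For context, a tap-to-focus sketch (assumed caller code, not part of this commit) that maps real touch coordinates into the camera's (-1000, 1000) focus range, in the spirit of the private calculateTapArea() above, using the Camera1 API:

previewView.setOnTouchListener((v, event) -> {
    int areaSize = 200;
    int centerX = (int) (event.getX() / v.getWidth() * 2000 - 1000);
    int centerY = (int) (event.getY() / v.getHeight() * 2000 - 1000);
    Rect focusRect = new Rect(
            Math.max(centerX - areaSize / 2, -1000),
            Math.max(centerY - areaSize / 2, -1000),
            Math.min(centerX + areaSize / 2, 1000),
            Math.min(centerY + areaSize / 2, 1000));
    Camera.Parameters p = camera.getParameters();
    if (p.getMaxNumFocusAreas() > 0) {
        p.setFocusAreas(java.util.Collections.singletonList(new Camera.Area(focusRect, 800)));
        camera.setParameters(p);
        camera.autoFocus((success, cam) -> { /* focus finished */ });
    }
    return true;
});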

View File

@@ -0,0 +1,109 @@
package me.lake.librestreaming.core;
import android.media.MediaCodec;
import java.nio.ByteBuffer;
import me.lake.librestreaming.rtmp.RESFlvData;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.rtmp.RESRtmpSender;
import me.lake.librestreaming.tools.LogTools;
public class AudioSenderThread extends Thread {
private static final long WAIT_TIME = 5000;//5ms; dequeueOutputBuffer timeout is in microseconds
private MediaCodec.BufferInfo eInfo;
private long startTime = 0;
private MediaCodec dstAudioEncoder;
private RESFlvDataCollecter dataCollecter;
AudioSenderThread(String name, MediaCodec encoder, RESFlvDataCollecter flvDataCollecter) {
super(name);
eInfo = new MediaCodec.BufferInfo();
startTime = 0;
dstAudioEncoder = encoder;
dataCollecter = flvDataCollecter;
}
private boolean shouldQuit = false;
void quit() {
shouldQuit = true;
this.interrupt();
}
@Override
public void run() {
while (!shouldQuit) {
int eobIndex = dstAudioEncoder.dequeueOutputBuffer(eInfo, WAIT_TIME);
switch (eobIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
LogTools.d("AudioSenderThread,MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED");
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
// LogTools.d("AudioSenderThread,MediaCodec.INFO_TRY_AGAIN_LATER");
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
LogTools.d("AudioSenderThread,MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:" +
dstAudioEncoder.getOutputFormat().toString());
ByteBuffer csd0 = dstAudioEncoder.getOutputFormat().getByteBuffer("csd-0");
sendAudioSpecificConfig(0, csd0);
break;
default:
LogTools.d("AudioSenderThread,MediaCode,eobIndex=" + eobIndex);
if (startTime == 0) {
startTime = eInfo.presentationTimeUs / 1000;
}
/**
* we send audio SpecificConfig already in INFO_OUTPUT_FORMAT_CHANGED
* so we ignore MediaCodec.BUFFER_FLAG_CODEC_CONFIG
*/
if ((eInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0 && eInfo.size != 0) {
ByteBuffer realData = dstAudioEncoder.getOutputBuffers()[eobIndex];
realData.position(eInfo.offset);
realData.limit(eInfo.offset + eInfo.size);
sendRealData((eInfo.presentationTimeUs / 1000) - startTime, realData);
}
dstAudioEncoder.releaseOutputBuffer(eobIndex, false);
break;
}
}
eInfo = null;
}
private void sendAudioSpecificConfig(long tms, ByteBuffer realData) {
int packetLen = Packager.FLVPackager.FLV_AUDIO_TAG_LENGTH +
realData.remaining();
byte[] finalBuff = new byte[packetLen];
realData.get(finalBuff, Packager.FLVPackager.FLV_AUDIO_TAG_LENGTH,
realData.remaining());
Packager.FLVPackager.fillFlvAudioTag(finalBuff,
0,
true);
RESFlvData resFlvData = new RESFlvData();
resFlvData.droppable = false;
resFlvData.byteBuffer = finalBuff;
resFlvData.size = finalBuff.length;
resFlvData.dts = (int) tms;
resFlvData.flvTagType = RESFlvData.FLV_RTMP_PACKET_TYPE_AUDIO;
dataCollecter.collect(resFlvData, RESRtmpSender.FROM_AUDIO);
}
private void sendRealData(long tms, ByteBuffer realData) {
int packetLen = Packager.FLVPackager.FLV_AUDIO_TAG_LENGTH +
realData.remaining();
byte[] finalBuff = new byte[packetLen];
realData.get(finalBuff, Packager.FLVPackager.FLV_AUDIO_TAG_LENGTH,
realData.remaining());
Packager.FLVPackager.fillFlvAudioTag(finalBuff,
0,
false);
RESFlvData resFlvData = new RESFlvData();
resFlvData.droppable = true;
resFlvData.byteBuffer = finalBuff;
resFlvData.size = finalBuff.length;
resFlvData.dts = (int) tms;
resFlvData.flvTagType = RESFlvData.FLV_RTMP_PACKET_TYPE_AUDIO;
dataCollecter.collect(resFlvData, RESRtmpSender.FROM_AUDIO);
}
}

View File

@@ -0,0 +1,131 @@
package me.lake.librestreaming.core;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.hardware.Camera;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.Size;
import me.lake.librestreaming.tools.LogTools;
public class CameraHelper {
public static int targetFps = 30000;
private static int[] supportedSrcVideoFrameColorType = new int[]{ImageFormat.NV21, ImageFormat.YV12};
public static boolean configCamera(Camera camera, RESCoreParameters coreParameters) {
camera.cancelAutoFocus();
Camera.Parameters parameters = camera.getParameters();
parameters.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
List<String> focusModes = parameters.getSupportedFocusModes();
if (focusModes != null) {
for (String focusMode : focusModes) {
System.out.println("focusMode = " + focusMode);
}
/*if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
} else if (focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
} else if (focusModes.contains(Camera.Parameters.FOCUS_MODE_FIXED)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_FIXED);
}*/
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
}
parameters.setPreviewSize(coreParameters.previewVideoWidth, coreParameters.previewVideoHeight);
parameters.setPreviewFpsRange(coreParameters.previewMinFps, coreParameters.previewMaxFps);
try {
camera.setParameters(parameters);
} catch (Exception e) {
camera.release();
return false;
}
return true;
}
public static void selectCameraFpsRange(Camera.Parameters parameters, RESCoreParameters coreParameters) {
List<int[]> fpsRanges = parameters.getSupportedPreviewFpsRange();
/* Collections.sort(fpsRanges, new Comparator<int[]>() {
@Override
public int compare(int[] lhs, int[] rhs) {
int r = Math.abs(lhs[0] - targetFps) + Math.abs(lhs[1] - targetFps);
int l = Math.abs(rhs[0] - targetFps) + Math.abs(rhs[1] - targetFps);
if (r > l) {
return 1;
} else if (r < l) {
return -1;
} else {
return 0;
}
}
});*/
coreParameters.previewMinFps=fpsRanges.get(0)[0];
for (int[] fpsRange : fpsRanges) {
if(coreParameters.previewMinFps>fpsRange[0]){
coreParameters.previewMinFps=fpsRange[0];
}
if(coreParameters.previewMaxFps<fpsRange[1]){
coreParameters.previewMaxFps=fpsRange[1];
}
}
for (int[] range : fpsRanges) {
System.out.println("最小fps = " + range[0]);
System.out.println("最大fps = " + range[1]);
System.out.println("--------------");
}
System.out.println("最小fps *= " + coreParameters.previewMinFps);
System.out.println("最大fps *= " + coreParameters.previewMaxFps);
System.out.println("fpsRanges.size() = " + fpsRanges.size());
}
public static void selectCameraPreviewWH(Camera.Parameters parameters, RESCoreParameters coreParameters, Size targetSize) {
List<Camera.Size> previewsSizes = parameters.getSupportedPreviewSizes();
Collections.sort(previewsSizes, new Comparator<Camera.Size>() {
@Override
public int compare(Camera.Size lhs, Camera.Size rhs) {
if ((lhs.width * lhs.height) > (rhs.width * rhs.height)) {
return 1;
} else {
return -1;
}
}
});
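//previewsSizes is now sorted by ascending area, so the loop below picks the smallest
//supported preview size that still covers the requested target size.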
for (Camera.Size size : previewsSizes) {
if (size.width >= targetSize.getWidth() && size.height >= targetSize.getHeight()) {
coreParameters.previewVideoWidth = size.width;
coreParameters.previewVideoHeight = size.height;
return;
}
}
}
public static boolean selectCameraColorFormat(Camera.Parameters parameters, RESCoreParameters coreParameters) {
List<Integer> srcColorTypes = new LinkedList<>();
List<Integer> supportedPreviewFormates = parameters.getSupportedPreviewFormats();
for (int colortype : supportedSrcVideoFrameColorType) {
if (supportedPreviewFormates.contains(colortype)) {
srcColorTypes.add(colortype);
}
}
//select preview colorformat
if (srcColorTypes.contains(ImageFormat.NV21)) {
coreParameters.previewColorFormat = ImageFormat.NV21;
} else if (srcColorTypes.contains(ImageFormat.YV12)) {
coreParameters.previewColorFormat = ImageFormat.YV12;
} else {
LogTools.e("!!!!!!!!!!!UnSupport,previewColorFormat");
return false;
}
return true;
}
}

View File

@@ -0,0 +1,19 @@
package me.lake.librestreaming.core;
@SuppressWarnings("all")
public class ColorHelper {
static public native void NV21TOYUV420SP(byte[] src, byte[] dst, int YSize);
static public native void NV21TOYUV420P(byte[] src, byte[] dst, int YSize);
static public native void YUV420SPTOYUV420P(byte[] src, byte[] dst, int YSize);
static public native void NV21TOARGB(byte[] src, int[] dst, int width,int height);
static public native void FIXGLPIXEL(int[] src,int[] dst, int width,int height);
//slow
static public native void NV21Transform(byte[] src, byte[] dst, int srcwidth,int srcheight,int directionFlag);
}
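
A small buffer-sizing sketch (assumed caller code; it assumes YSize is the Y-plane size, i.e. width * height): both NV21 and planar YUV420 frames occupy 3/2 * YSize bytes.

int width = 1280, height = 720;          // assumed preview size
int ySize = width * height;
byte[] nv21 = new byte[ySize * 3 / 2];   // e.g. filled by Camera.PreviewCallback
byte[] i420 = new byte[ySize * 3 / 2];
ColorHelper.NV21TOYUV420P(nv21, i420, ySize);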

View File

@@ -0,0 +1,499 @@
package me.lake.librestreaming.core;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.view.Surface;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import javax.microedition.khronos.egl.EGL10;
import me.lake.librestreaming.model.MediaCodecGLWapper;
import me.lake.librestreaming.model.OffScreenGLWapper;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.ScreenGLWapper;
import me.lake.librestreaming.tools.GLESTools;
public class GLHelper {
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
private static String VERTEXSHADER = "" +
"attribute vec4 aPosition;\n" +
"attribute vec2 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main(){\n" +
" gl_Position= aPosition;\n" +
" vTextureCoord = aTextureCoord;\n" +
"}";
private static final String VERTEXSHADER_CAMERA2D =
"attribute vec4 aPosition;\n" +
"attribute vec4 aTextureCoord;\n" +
"uniform mat4 uTextureMatrix;\n" +
"varying vec2 vTextureCoord;\n" +
"void main(){\n" +
" gl_Position= aPosition;\n" +
" vTextureCoord = (uTextureMatrix * aTextureCoord).xy;\n" +
"}";
private static String FRAGMENTSHADER_CAMERA = "" +
"#extension GL_OES_EGL_image_external : require\n" +
"precision highp float;\n" +
"varying highp vec2 vTextureCoord;\n" +
"uniform sampler2D uTexture;\n" +
"void main(){\n" +
" vec4 color = texture2D(uTexture, vTextureCoord);\n" +
" gl_FragColor = color;\n" +
"}";
private static String FRAGMENTSHADER_CAMERA2D = "" +
"#extension GL_OES_EGL_image_external : require\n" +
"precision highp float;\n" +
"varying highp vec2 vTextureCoord;\n" +
"uniform samplerExternalOES uTexture;\n" +
"void main(){\n" +
" vec4 color = texture2D(uTexture, vTextureCoord);\n" +
" gl_FragColor = color;\n" +
"}";
private static String FRAGMENTSHADER_2D = "" +
"precision highp float;\n" +
"varying highp vec2 vTextureCoord;\n" +
"uniform sampler2D uTexture;\n" +
"void main(){\n" +
" vec4 color = texture2D(uTexture, vTextureCoord);\n" +
" gl_FragColor = color;\n" +
"}";
private static short drawIndices[] = {0, 1, 2, 0, 2, 3};
private static float SquareVertices[] = {
-1.0f, 1.0f,
-1.0f, -1.0f,
1.0f, -1.0f,
1.0f, 1.0f};
private static float CamTextureVertices[] = {
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f};
private static float Cam2dTextureVertices[] = {
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f};
private static float Cam2dTextureVertices_90[] = {
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 1.0f};
private static float Cam2dTextureVertices_180[] = {
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 1.0f,
0.0f, 0.0f};
private static float Cam2dTextureVertices_270[] = {
1.0f, 1.0f,
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f};
public static float MediaCodecTextureVertices[] = {
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f};
//mirrored variant
// public static float MediaCodecTextureVertices[] = {
// 1.0f, 1.0f,
// 1.0f, 0.0f,
// 0.0f, 0.0f,
// 0.0f, 1.0f};
private static float ScreenTextureVertices[] = {
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 0.0f,
1.0f, 1.0f};
// private static float ScreenTextureVertices[] = {
// 1.0f, 1.0f,
// 1.0f, 0.0f,
// 0.0f, 0.0f,
// 0.0f, 1.0f};
public static int FLOAT_SIZE_BYTES = 4;
public static int SHORT_SIZE_BYTES = 2;
public static int COORDS_PER_VERTEX = 2;
public static int TEXTURE_COORDS_PER_VERTEX = 2;
public static void initOffScreenGL(OffScreenGLWapper wapper) {
wapper.eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (EGL14.EGL_NO_DISPLAY == wapper.eglDisplay) {
throw new RuntimeException("eglGetDisplay,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int versions[] = new int[2];
if (!EGL14.eglInitialize(wapper.eglDisplay, versions, 0, versions, 1)) {
throw new RuntimeException("eglInitialize,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int configsCount[] = new int[1];
EGLConfig configs[] = new EGLConfig[1];
int configSpec[] = new int[]{
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_DEPTH_SIZE, 0,
EGL14.EGL_STENCIL_SIZE, 0,
EGL14.EGL_NONE
};
EGL14.eglChooseConfig(wapper.eglDisplay, configSpec, 0, configs, 0, 1, configsCount, 0);
if (configsCount[0] <= 0) {
throw new RuntimeException("eglChooseConfig,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
wapper.eglConfig = configs[0];
int[] surfaceAttribs = {
EGL10.EGL_WIDTH, 1,
EGL10.EGL_HEIGHT, 1,
EGL14.EGL_NONE
};
int contextSpec[] = new int[]{
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
wapper.eglContext = EGL14.eglCreateContext(wapper.eglDisplay, wapper.eglConfig, EGL14.EGL_NO_CONTEXT, contextSpec, 0);
if (EGL14.EGL_NO_CONTEXT == wapper.eglContext) {
throw new RuntimeException("eglCreateContext,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int[] values = new int[1];
EGL14.eglQueryContext(wapper.eglDisplay, wapper.eglContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, values, 0);
wapper.eglSurface = EGL14.eglCreatePbufferSurface(wapper.eglDisplay, wapper.eglConfig, surfaceAttribs, 0);
if (null == wapper.eglSurface || EGL14.EGL_NO_SURFACE == wapper.eglSurface) {
throw new RuntimeException("eglCreateWindowSurface,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
}
public static void initMediaCodecGL(MediaCodecGLWapper wapper, EGLContext sharedContext, Surface mediaInputSurface) {
wapper.eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (EGL14.EGL_NO_DISPLAY == wapper.eglDisplay) {
throw new RuntimeException("eglGetDisplay,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int versions[] = new int[2];
if (!EGL14.eglInitialize(wapper.eglDisplay, versions, 0, versions, 1)) {
throw new RuntimeException("eglInitialize,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int configsCount[] = new int[1];
EGLConfig configs[] = new EGLConfig[1];
int configSpec[] = new int[]{
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL_RECORDABLE_ANDROID, 1,
EGL14.EGL_DEPTH_SIZE, 0,
EGL14.EGL_STENCIL_SIZE, 0,
EGL14.EGL_NONE
};
EGL14.eglChooseConfig(wapper.eglDisplay, configSpec, 0, configs, 0, 1, configsCount, 0);
if (configsCount[0] <= 0) {
throw new RuntimeException("eglChooseConfig,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
wapper.eglConfig = configs[0];
int[] surfaceAttribs = {
EGL14.EGL_NONE
};
int contextSpec[] = new int[]{
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
wapper.eglContext = EGL14.eglCreateContext(wapper.eglDisplay, wapper.eglConfig, sharedContext, contextSpec, 0);
if (EGL14.EGL_NO_CONTEXT == wapper.eglContext) {
throw new RuntimeException("eglCreateContext,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int[] values = new int[1];
EGL14.eglQueryContext(wapper.eglDisplay, wapper.eglContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, values, 0);
wapper.eglSurface = EGL14.eglCreateWindowSurface(wapper.eglDisplay, wapper.eglConfig, mediaInputSurface, surfaceAttribs, 0);
if (null == wapper.eglSurface || EGL14.EGL_NO_SURFACE == wapper.eglSurface) {
throw new RuntimeException("eglCreateWindowSurface,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
}
public static void initScreenGL(ScreenGLWapper wapper, EGLContext sharedContext, SurfaceTexture screenSurface) {
wapper.eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (EGL14.EGL_NO_DISPLAY == wapper.eglDisplay) {
throw new RuntimeException("eglGetDisplay,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int versions[] = new int[2];
if (!EGL14.eglInitialize(wapper.eglDisplay, versions, 0, versions, 1)) {
throw new RuntimeException("eglInitialize,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int configsCount[] = new int[1];
EGLConfig configs[] = new EGLConfig[1];
int configSpec[] = new int[]{
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_DEPTH_SIZE, 0,
EGL14.EGL_STENCIL_SIZE, 0,
EGL14.EGL_NONE
};
EGL14.eglChooseConfig(wapper.eglDisplay, configSpec, 0, configs, 0, 1, configsCount, 0);
if (configsCount[0] <= 0) {
throw new RuntimeException("eglChooseConfig,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
wapper.eglConfig = configs[0];
int[] surfaceAttribs = {
EGL14.EGL_NONE
};
int contextSpec[] = new int[]{
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
wapper.eglContext = EGL14.eglCreateContext(wapper.eglDisplay, wapper.eglConfig, sharedContext, contextSpec, 0);
if (EGL14.EGL_NO_CONTEXT == wapper.eglContext) {
throw new RuntimeException("eglCreateContext,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
int[] values = new int[1];
EGL14.eglQueryContext(wapper.eglDisplay, wapper.eglContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, values, 0);
wapper.eglSurface = EGL14.eglCreateWindowSurface(wapper.eglDisplay, wapper.eglConfig, screenSurface, surfaceAttribs, 0);
if (null == wapper.eglSurface || EGL14.EGL_NO_SURFACE == wapper.eglSurface) {
throw new RuntimeException("eglCreateWindowSurface,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
}
public static void makeCurrent(OffScreenGLWapper wapper) {
if (!EGL14.eglMakeCurrent(wapper.eglDisplay, wapper.eglSurface, wapper.eglSurface, wapper.eglContext)) {
throw new RuntimeException("eglMakeCurrent,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
}
public static void makeCurrent(MediaCodecGLWapper wapper) {
if (!EGL14.eglMakeCurrent(wapper.eglDisplay, wapper.eglSurface, wapper.eglSurface, wapper.eglContext)) {
throw new RuntimeException("eglMakeCurrent,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
}
public static void makeCurrent(ScreenGLWapper wapper) {
if (!EGL14.eglMakeCurrent(wapper.eglDisplay, wapper.eglSurface, wapper.eglSurface, wapper.eglContext)) {
throw new RuntimeException("eglMakeCurrent,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
}
}
public static void createCamFrameBuff(int[] frameBuffer, int[] frameBufferTex, int width, int height) {
GLES20.glGenFramebuffers(1, frameBuffer, 0);
GLES20.glGenTextures(1, frameBufferTex, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameBufferTex[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer[0]);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, frameBufferTex[0], 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLESTools.checkGlError("createCamFrameBuff");
}
public static void enableVertex(int posLoc, int texLoc, FloatBuffer shapeBuffer, FloatBuffer texBuffer) {
GLES20.glEnableVertexAttribArray(posLoc);
GLES20.glEnableVertexAttribArray(texLoc);
GLES20.glVertexAttribPointer(posLoc, COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
COORDS_PER_VERTEX * 4, shapeBuffer);
GLES20.glVertexAttribPointer(texLoc, TEXTURE_COORDS_PER_VERTEX,
GLES20.GL_FLOAT, false,
TEXTURE_COORDS_PER_VERTEX * 4, texBuffer);
}
public static void disableVertex(int posLoc, int texLoc) {
GLES20.glDisableVertexAttribArray(posLoc);
GLES20.glDisableVertexAttribArray(texLoc);
}
public static int createCamera2DProgram() {
return GLESTools.createProgram(VERTEXSHADER_CAMERA2D, FRAGMENTSHADER_CAMERA2D);
}
public static int createCameraProgram() {
return GLESTools.createProgram(VERTEXSHADER, FRAGMENTSHADER_CAMERA);
}
public static int createMediaCodecProgram() {
return GLESTools.createProgram(VERTEXSHADER, FRAGMENTSHADER_2D);
}
public static int createScreenProgram() {
return GLESTools.createProgram(VERTEXSHADER, FRAGMENTSHADER_2D);
}
public static ShortBuffer getDrawIndecesBuffer() {
ShortBuffer result = ByteBuffer.allocateDirect(SHORT_SIZE_BYTES * drawIndices.length).
order(ByteOrder.nativeOrder()).
asShortBuffer();
result.put(drawIndices);
result.position(0);
return result;
}
public static FloatBuffer getShapeVerticesBuffer() {
FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * SquareVertices.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
result.put(SquareVertices);
result.position(0);
return result;
}
public static FloatBuffer getMediaCodecTextureVerticesBuffer() {
FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * MediaCodecTextureVertices.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
result.put(MediaCodecTextureVertices);
result.position(0);
return result;
}
public static FloatBuffer getScreenTextureVerticesBuffer() {
FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * ScreenTextureVertices.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
result.put(ScreenTextureVertices);
result.position(0);
return result;
}
public static FloatBuffer getCamera2DTextureVerticesBuffer(final int directionFlag, final float cropRatio) {
if (directionFlag == -1) {
FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * Cam2dTextureVertices.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
result.put(CamTextureVertices);
result.position(0);
return result;
}
float[] buffer;
switch (directionFlag & 0xF0) {
case RESCoreParameters.FLAG_DIRECTION_ROATATION_90:
buffer = Cam2dTextureVertices_90.clone();
break;
case RESCoreParameters.FLAG_DIRECTION_ROATATION_180:
buffer = Cam2dTextureVertices_180.clone();
break;
case RESCoreParameters.FLAG_DIRECTION_ROATATION_270:
buffer = Cam2dTextureVertices_270.clone();
break;
default:
buffer = Cam2dTextureVertices.clone();
}
if ((directionFlag & 0xF0) == RESCoreParameters.FLAG_DIRECTION_ROATATION_0 || (directionFlag & 0xF0) == RESCoreParameters.FLAG_DIRECTION_ROATATION_180) {
if (cropRatio > 0) {
buffer[1] = buffer[1] == 1.0f ? (1.0f - cropRatio) : cropRatio;
buffer[3] = buffer[3] == 1.0f ? (1.0f - cropRatio) : cropRatio;
buffer[5] = buffer[5] == 1.0f ? (1.0f - cropRatio) : cropRatio;
buffer[7] = buffer[7] == 1.0f ? (1.0f - cropRatio) : cropRatio;
} else {
buffer[0] = buffer[0] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
buffer[2] = buffer[2] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
buffer[4] = buffer[4] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
buffer[6] = buffer[6] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
}
} else {
if (cropRatio > 0) {
buffer[0] = buffer[0] == 1.0f ? (1.0f - cropRatio) : cropRatio;
buffer[2] = buffer[2] == 1.0f ? (1.0f - cropRatio) : cropRatio;
buffer[4] = buffer[4] == 1.0f ? (1.0f - cropRatio) : cropRatio;
buffer[6] = buffer[6] == 1.0f ? (1.0f - cropRatio) : cropRatio;
} else {
buffer[1] = buffer[1] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
buffer[3] = buffer[3] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
buffer[5] = buffer[5] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
buffer[7] = buffer[7] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
}
}
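        // Mirror the texture coordinates when the flip flags are set (flip maps t to 1 - t).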
if ((directionFlag & RESCoreParameters.FLAG_DIRECTION_FLIP_HORIZONTAL) != 0) {
buffer[0] = flip(buffer[0]);
buffer[2] = flip(buffer[2]);
buffer[4] = flip(buffer[4]);
buffer[6] = flip(buffer[6]);
}
if ((directionFlag & RESCoreParameters.FLAG_DIRECTION_FLIP_VERTICAL) != 0) {
buffer[1] = flip(buffer[1]);
buffer[3] = flip(buffer[3]);
buffer[5] = flip(buffer[5]);
buffer[7] = flip(buffer[7]);
}
FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * buffer.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
result.put(buffer);
result.position(0);
return result;
}
public static FloatBuffer getCameraTextureVerticesBuffer() {
        FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * CamTextureVertices.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
result.put(CamTextureVertices);
result.position(0);
return result;
}
private static float flip(final float i) {
return (1.0f - i);
}
    public static FloatBuffer adjustTextureFlip(boolean flipHorizontal) {
        float[] textureCords = getFlip(flipHorizontal, false);
        FloatBuffer mTextureBuffer = ByteBuffer.allocateDirect(textureCords.length * FLOAT_SIZE_BYTES)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer();
        mTextureBuffer.put(textureCords).position(0);
        return mTextureBuffer;
    }
public static float[] getFlip(final boolean flipHorizontal,
final boolean flipVertical) {
float[] rotatedTex = Cam2dTextureVertices;
if (flipHorizontal) {
rotatedTex = new float[]{
flip2(rotatedTex[0]), rotatedTex[1],
flip2(rotatedTex[2]), rotatedTex[3],
flip2(rotatedTex[4]), rotatedTex[5],
flip2(rotatedTex[6]), rotatedTex[7],
};
}
if (flipVertical) {
rotatedTex = new float[]{
rotatedTex[0], flip2(rotatedTex[1]),
rotatedTex[2], flip2(rotatedTex[3]),
rotatedTex[4], flip2(rotatedTex[5]),
rotatedTex[6], flip2(rotatedTex[7]),
};
}
return rotatedTex;
}
private static float flip2(final float i) {
if (i == 0.0f) {
return 1.0f;
}
return 0.0f;
}
}

View File

@@ -0,0 +1,137 @@
package me.lake.librestreaming.core;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import java.io.IOException;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.tools.LogTools;
public class MediaCodecHelper {
public static MediaCodec createSoftVideoMediaCodec(RESCoreParameters coreParameters, MediaFormat videoFormat) {
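        // Configures a baseline-profile H.264 encoder fed with YUV buffers; an encoder-supported
        // color format (semi-planar preferred, planar YUV420 as fallback) is selected below and
        // written back into coreParameters.mediacodecAVCColorFormat.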
videoFormat.setString(MediaFormat.KEY_MIME, "video/avc");
videoFormat.setInteger(MediaFormat.KEY_WIDTH, coreParameters.videoWidth);
videoFormat.setInteger(MediaFormat.KEY_HEIGHT, coreParameters.videoHeight);
videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, coreParameters.mediacdoecAVCBitRate);
videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, coreParameters.mediacodecAVCFrameRate);
videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, coreParameters.mediacodecAVCIFrameInterval);
videoFormat.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline);
videoFormat.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel31);
videoFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);
MediaCodec result = null;
try {
result = MediaCodec.createEncoderByType(videoFormat.getString(MediaFormat.KEY_MIME));
//select color
int[] colorful = result.getCodecInfo().getCapabilitiesForType(videoFormat.getString(MediaFormat.KEY_MIME)).colorFormats;
int dstVideoColorFormat = -1;
//select mediacodec colorformat
if (isArrayContain(colorful, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar)) {
dstVideoColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
coreParameters.mediacodecAVCColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
}
if (dstVideoColorFormat == -1 && isArrayContain(colorful, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar)) {
dstVideoColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
coreParameters.mediacodecAVCColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
}
if (dstVideoColorFormat == -1) {
LogTools.e("!!!!!!!!!!!UnSupport,mediaCodecColorFormat");
return null;
}
videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, dstVideoColorFormat);
//selectprofile
// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
// MediaCodecInfo.CodecProfileLevel[] profileLevels = result.getCodecInfo().getCapabilitiesForType(videoFormat.getString(MediaFormat.KEY_MIME)).profileLevels;
// if (isProfileContain(profileLevels, MediaCodecInfo.CodecProfileLevel.AVCProfileMain)) {
// coreParameters.mediacodecAVCProfile = MediaCodecInfo.CodecProfileLevel.AVCProfileMain;
// coreParameters.mediacodecAVClevel = MediaCodecInfo.CodecProfileLevel.AVCLevel31;
// } else {
// coreParameters.mediacodecAVCProfile = MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline;
// coreParameters.mediacodecAVClevel = MediaCodecInfo.CodecProfileLevel.AVCLevel31;
// }
// videoFormat.setInteger(MediaFormat.KEY_PROFILE, coreParameters.mediacodecAVCProfile);
// //level must be set even below M
// videoFormat.setInteger(MediaFormat.KEY_LEVEL, coreParameters.mediacodecAVClevel);
// }
} catch (IOException e) {
LogTools.trace(e);
return null;
}
return result;
}
public static MediaCodec createAudioMediaCodec(RESCoreParameters coreParameters, MediaFormat audioFormat) {
//Audio
MediaCodec result;
audioFormat.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, coreParameters.mediacodecAACProfile);
audioFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, coreParameters.mediacodecAACSampleRate);
audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, coreParameters.mediacodecAACChannelCount);
audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, coreParameters.mediacodecAACBitRate);
audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, coreParameters.mediacodecAACMaxInputSize);
LogTools.d("creatingAudioEncoder,format=" + audioFormat.toString());
try {
result = MediaCodec.createEncoderByType(audioFormat.getString(MediaFormat.KEY_MIME));
} catch (Exception e) {
LogTools.trace("can`t create audioEncoder!", e);
return null;
}
return result;
}
public static MediaCodec createHardVideoMediaCodec(RESCoreParameters coreParameters, MediaFormat videoFormat) {
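        // Same H.264 configuration as the soft path, but uses COLOR_FormatSurface so frames are
        // delivered through an input Surface (OpenGL) instead of YUV buffers.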
videoFormat.setString(MediaFormat.KEY_MIME, "video/avc");
videoFormat.setInteger(MediaFormat.KEY_WIDTH, coreParameters.videoWidth);
videoFormat.setInteger(MediaFormat.KEY_HEIGHT, coreParameters.videoHeight);
videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, coreParameters.mediacdoecAVCBitRate);
videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, coreParameters.mediacodecAVCFrameRate);
videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, coreParameters.mediacodecAVCIFrameInterval);
videoFormat.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline);
videoFormat.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel31);
videoFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);
videoFormat.setInteger(MediaFormat.KEY_COMPLEXITY, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);//added by wangshuo
MediaCodec result = null;
try {
result = MediaCodec.createEncoderByType(videoFormat.getString(MediaFormat.KEY_MIME));
//selectprofile
// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
// MediaCodecInfo.CodecProfileLevel[] profileLevels = result.getCodecInfo().getCapabilitiesForType(videoFormat.getString(MediaFormat.KEY_MIME)).profileLevels;
// if (isProfileContain(profileLevels, MediaCodecInfo.CodecProfileLevel.AVCProfileMain)) {
// coreParameters.mediacodecAVCProfile = MediaCodecInfo.CodecProfileLevel.AVCProfileMain;
// coreParameters.mediacodecAVClevel = MediaCodecInfo.CodecProfileLevel.AVCLevel31;
// } else {
// coreParameters.mediacodecAVCProfile = MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline;
// coreParameters.mediacodecAVClevel = MediaCodecInfo.CodecProfileLevel.AVCLevel31;
// }
// videoFormat.setInteger(MediaFormat.KEY_PROFILE, coreParameters.mediacodecAVCProfile);
// //level must be set even below M
// videoFormat.setInteger(MediaFormat.KEY_LEVEL, coreParameters.mediacodecAVClevel);
// }
} catch (IOException e) {
LogTools.trace(e);
return null;
}
return result;
}
private static boolean isArrayContain(int[] src, int target) {
for (int color : src) {
if (color == target) {
return true;
}
}
return false;
}
private static boolean isProfileContain(MediaCodecInfo.CodecProfileLevel[] src, int target) {
for (MediaCodecInfo.CodecProfileLevel color : src) {
if (color.profile == target) {
return true;
}
}
return false;
}
}

View File

@@ -0,0 +1,88 @@
package me.lake.librestreaming.core;
import android.media.MediaFormat;
import java.nio.ByteBuffer;
import me.lake.librestreaming.tools.ByteArrayTools;
public class Packager {
public static class H264Packager {
public static byte[] generateAVCDecoderConfigurationRecord(MediaFormat mediaFormat) {
ByteBuffer SPSByteBuff = mediaFormat.getByteBuffer("csd-0");
SPSByteBuff.position(4);
ByteBuffer PPSByteBuff = mediaFormat.getByteBuffer("csd-1");
PPSByteBuff.position(4);
int spslength = SPSByteBuff.remaining();
int ppslength = PPSByteBuff.remaining();
int length = 11 + spslength + ppslength;
byte[] result = new byte[length];
SPSByteBuff.get(result, 8, spslength);
PPSByteBuff.get(result, 8 + spslength + 3, ppslength);
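            // AVCDecoderConfigurationRecord layout: 5 header bytes, a 1-byte SPS count and 2-byte
            // SPS length at offsets 5-7, the SPS payload at offset 8, then a 1-byte PPS count,
            // 2-byte PPS length and the PPS payload (total = 11 + sps + pps). The SPS is copied
            // first so its profile/compatibility/level bytes can be mirrored into header bytes 1-3.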
/**
* UB[8]configurationVersion
* UB[8]AVCProfileIndication
* UB[8]profile_compatibility
* UB[8]AVCLevelIndication
* UB[8]lengthSizeMinusOne
*/
result[0] = 0x01;
result[1] = result[9];
result[2] = result[10];
result[3] = result[11];
result[4] = (byte) 0xFF;
/**
* UB[8]numOfSequenceParameterSets
* UB[16]sequenceParameterSetLength
*/
result[5] = (byte) 0xE1;
ByteArrayTools.intToByteArrayTwoByte(result, 6, spslength);
/**
* UB[8]numOfPictureParameterSets
* UB[16]pictureParameterSetLength
*/
int pos = 8 + spslength;
result[pos] = (byte) 0x01;
ByteArrayTools.intToByteArrayTwoByte(result, pos + 1, ppslength);
return result;
}
}
public static class FLVPackager {
public static final int FLV_TAG_LENGTH = 11;
public static final int FLV_VIDEO_TAG_LENGTH = 5;
public static final int FLV_AUDIO_TAG_LENGTH = 2;
public static final int FLV_TAG_FOOTER_LENGTH = 4;
public static final int NALU_HEADER_LENGTH = 4;
public static void fillFlvVideoTag(byte[] dst, int pos, boolean isAVCSequenceHeader, boolean isIDR, int readDataLength) {
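            // FLV VideoTagHeader layout: byte 0 = FrameType (high nibble) | CodecID (low nibble),
            // byte 1 = AVCPacketType (0 = sequence header, 1 = NALU), bytes 2-4 = CompositionTime,
            // followed (for NALUs) by a 4-byte NALU length prefix.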
//FrameType&CodecID
dst[pos] = isIDR ? (byte) 0x17 : (byte) 0x27;
//AVCPacketType
dst[pos + 1] = isAVCSequenceHeader ? (byte) 0x00 : (byte) 0x01;
//LAKETODO CompositionTime
dst[pos + 2] = 0x00;
dst[pos + 3] = 0x00;
dst[pos + 4] = 0x00;
if (!isAVCSequenceHeader) {
//NALU HEADER
ByteArrayTools.intToByteArrayFull(dst, pos + 5, readDataLength);
}
}
public static void fillFlvAudioTag(byte[] dst, int pos, boolean isAACSequenceHeader) {
/**
* UB[4] 10=AAC
* UB[2] 3=44kHz
* UB[1] 1=16-bit
* UB[1] 0=MonoSound
*/
dst[pos] = (byte) 0xAE;
dst[pos + 1] = isAACSequenceHeader ? (byte) 0x00 : (byte) 0x01;
}
}
}

View File

@@ -0,0 +1,57 @@
package me.lake.librestreaming.core;
import java.util.LinkedList;
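// Sliding-window byte-rate meter: gain() records byte counts with timestamps and getSpeed()
// sums the bytes seen within the last timeGranularity milliseconds, normalized to bytes per second.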
public class RESByteSpeedometer {
private int timeGranularity;
private LinkedList<ByteFrame> byteList;
private final Object syncByteList = new Object();
public RESByteSpeedometer(int timeGranularity) {
this.timeGranularity = timeGranularity;
byteList = new LinkedList<>();
}
public int getSpeed() {
synchronized (syncByteList) {
long now = System.currentTimeMillis();
trim(now);
long sumByte = 0;
for (ByteFrame byteFrame : byteList) {
sumByte += byteFrame.bytenum;
}
return (int) (sumByte * 1000 / timeGranularity);
}
}
public void gain(int byteCount) {
synchronized (syncByteList) {
long now = System.currentTimeMillis();
byteList.addLast(new ByteFrame(now, byteCount));
trim(now);
}
}
private void trim(long time) {
while (!byteList.isEmpty() && (time - byteList.getFirst().time) > timeGranularity) {
byteList.removeFirst();
}
}
public void reset() {
synchronized (syncByteList) {
byteList.clear();
}
}
private class ByteFrame {
long time;
long bytenum;
public ByteFrame(long time, long bytenum) {
this.time = time;
this.bytenum = bytenum;
}
}
}

View File

@@ -0,0 +1,44 @@
package me.lake.librestreaming.core;
public class RESFrameRateMeter {
private static final long TIMETRAVEL = 1;
private static final long TIMETRAVEL_MS = TIMETRAVEL * 1000;
private static final long GET_TIMETRAVEL_MS = 2 * TIMETRAVEL_MS;
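    // count() is called once per frame; the FPS value is recomputed after each TIMETRAVEL_MS
    // window, and getFps() reports 0 if no frame has been counted for two windows.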
private int times;
private float lastFps;
private long lastUpdateTime;
public RESFrameRateMeter() {
times = 0;
lastFps = 0;
lastUpdateTime = 0;
}
public void count() {
long now = System.currentTimeMillis();
if (lastUpdateTime == 0) {
lastUpdateTime = now;
}
if ((now - lastUpdateTime) > TIMETRAVEL_MS) {
lastFps = (((float) times) / (now - lastUpdateTime)) * 1000.0f;
lastUpdateTime = now;
times = 0;
}
++times;
}
public float getFps() {
if ((System.currentTimeMillis() - lastUpdateTime) > GET_TIMETRAVEL_MS) {
return 0;
} else {
return lastFps;
}
}
public void reSet() {
times = 0;
lastFps = 0;
lastUpdateTime = 0;
}
}

View File

@@ -0,0 +1,891 @@
package me.lake.librestreaming.core;
import android.annotation.TargetApi;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.opengl.EGL14;
import android.opengl.EGLExt;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.SystemClock;
import android.view.Surface;
import java.nio.Buffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.nio.ShortBuffer;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import me.lake.librestreaming.client.CallbackDelivery;
import me.lake.librestreaming.core.listener.RESScreenShotListener;
import me.lake.librestreaming.core.listener.RESVideoChangeListener;
import me.lake.librestreaming.encoder.MediaVideoEncoder;
import me.lake.librestreaming.filter.hardvideofilter.BaseHardVideoFilter;
import me.lake.librestreaming.model.MediaCodecGLWapper;
import me.lake.librestreaming.model.OffScreenGLWapper;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.ScreenGLWapper;
import me.lake.librestreaming.model.Size;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.tools.LogTools;
public class RESHardVideoCore implements RESVideoCore {
RESCoreParameters resCoreParameters;
private final Object syncOp = new Object();
//filter
private Lock lockVideoFilter = null;
private BaseHardVideoFilter videoFilter;
private MediaCodec dstVideoEncoder;
private MediaFormat dstVideoFormat;
private final Object syncPreview = new Object();
private HandlerThread videoGLHandlerThread;
private VideoGLHandler videoGLHander;
final private Object syncResScreenShotListener = new Object();
private RESScreenShotListener resScreenShotListener;
final private Object syncResVideoChangeListener = new Object();
private RESVideoChangeListener resVideoChangeListener;
private final Object syncIsLooping = new Object();
private boolean isPreviewing = false;
private boolean isStreaming = false;
private int loopingInterval;
private boolean isEnableMirror;
private boolean isEnablePreviewMirror;
private boolean isEnableStreamMirror;
public RESHardVideoCore(RESCoreParameters parameters) {
resCoreParameters = parameters;
lockVideoFilter = new ReentrantLock(false);
}
public void onFrameAvailable() {
if (videoGLHandlerThread != null) {
videoGLHander.addFrameNum();
}
}
@Override
public boolean prepare(RESConfig resConfig) {
synchronized (syncOp) {
resCoreParameters.renderingMode = resConfig.getRenderingMode();
resCoreParameters.mediacdoecAVCBitRate = resConfig.getBitRate();
resCoreParameters.videoBufferQueueNum = resConfig.getVideoBufferQueueNum();
resCoreParameters.mediacodecAVCIFrameInterval = resConfig.getVideoGOP();
resCoreParameters.mediacodecAVCFrameRate = resCoreParameters.videoFPS;
loopingInterval = 1000 / resCoreParameters.videoFPS;
dstVideoFormat = new MediaFormat();
videoGLHandlerThread = new HandlerThread("GLThread");
videoGLHandlerThread.start();
videoGLHander = new VideoGLHandler(videoGLHandlerThread.getLooper());
videoGLHander.sendEmptyMessage(VideoGLHandler.WHAT_INIT);
return true;
}
}
@Override
public void startPreview(SurfaceTexture surfaceTexture, int visualWidth, int visualHeight) {
synchronized (syncOp) {
videoGLHander.sendMessage(videoGLHander.obtainMessage(VideoGLHandler.WHAT_START_PREVIEW,
visualWidth, visualHeight, surfaceTexture));
synchronized (syncIsLooping) {
if (!isPreviewing && !isStreaming) {
videoGLHander.removeMessages(VideoGLHandler.WHAT_DRAW);
videoGLHander.sendMessageDelayed(videoGLHander.obtainMessage(VideoGLHandler.WHAT_DRAW, SystemClock.uptimeMillis() + loopingInterval), loopingInterval);
}
isPreviewing = true;
}
}
}
@Override
public void updatePreview(int visualWidth, int visualHeight) {
synchronized (syncOp) {
synchronized (syncPreview) {
videoGLHander.updatePreview(visualWidth, visualHeight);
}
}
}
@Override
public void stopPreview(boolean releaseTexture) {
synchronized (syncOp) {
videoGLHander.sendMessage(videoGLHander.obtainMessage(VideoGLHandler.WHAT_STOP_PREVIEW, releaseTexture));
synchronized (syncIsLooping) {
isPreviewing = false;
}
}
}
@Override
public boolean startStreaming(RESFlvDataCollecter flvDataCollecter) {
synchronized (syncOp) {
videoGLHander.sendMessage(videoGLHander.obtainMessage(VideoGLHandler.WHAT_START_STREAMING, flvDataCollecter));
synchronized (syncIsLooping) {
if (!isPreviewing && !isStreaming) {
videoGLHander.removeMessages(VideoGLHandler.WHAT_DRAW);
videoGLHander.sendMessageDelayed(videoGLHander.obtainMessage(VideoGLHandler.WHAT_DRAW, SystemClock.uptimeMillis() + loopingInterval), loopingInterval);
}
isStreaming = true;
}
}
return true;
}
@Override
public void updateCamTexture(SurfaceTexture camTex) {
synchronized (syncOp) {
if (videoGLHander != null) {
videoGLHander.updateCamTexture(camTex);
}
}
}
@Override
public boolean stopStreaming() {
synchronized (syncOp) {
videoGLHander.sendEmptyMessage(VideoGLHandler.WHAT_STOP_STREAMING);
synchronized (syncIsLooping) {
isStreaming = false;
}
}
return true;
}
@Override
public boolean destroy() {
synchronized (syncOp) {
videoGLHander.sendEmptyMessage(VideoGLHandler.WHAT_UNINIT);
videoGLHandlerThread.quitSafely();
try {
videoGLHandlerThread.join();
} catch (InterruptedException ignored) {
}
videoGLHandlerThread = null;
videoGLHander = null;
return true;
}
}
@TargetApi(Build.VERSION_CODES.KITKAT)
@Override
public void reSetVideoBitrate(int bitrate) {
synchronized (syncOp) {
if (videoGLHander != null) {
videoGLHander.sendMessage(videoGLHander.obtainMessage(VideoGLHandler.WHAT_RESET_BITRATE, bitrate, 0));
resCoreParameters.mediacdoecAVCBitRate = bitrate;
dstVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, resCoreParameters.mediacdoecAVCBitRate);
}
}
}
@TargetApi(Build.VERSION_CODES.KITKAT)
@Override
public int getVideoBitrate() {
synchronized (syncOp) {
return resCoreParameters.mediacdoecAVCBitRate;
}
}
@Override
public void reSetVideoFPS(int fps) {
synchronized (syncOp) {
resCoreParameters.videoFPS = fps;
loopingInterval = 1000 / resCoreParameters.videoFPS;
}
}
@Override
public void reSetVideoSize(RESCoreParameters newParameters) {
synchronized (syncOp) {
synchronized (syncIsLooping) {
if (isPreviewing || isStreaming) {
videoGLHander.sendMessage(videoGLHander.obtainMessage(VideoGLHandler.WHAT_RESET_VIDEO, newParameters));
}
}
}
}
@Override
public void setCurrentCamera(int cameraIndex) {
mCameraId = cameraIndex;
synchronized (syncOp) {
if (videoGLHander != null) {
videoGLHander.updateCameraIndex(cameraIndex);
}
}
}
public BaseHardVideoFilter acquireVideoFilter() {
lockVideoFilter.lock();
return videoFilter;
}
public void releaseVideoFilter() {
lockVideoFilter.unlock();
}
public void setVideoFilter(BaseHardVideoFilter baseHardVideoFilter) {
lockVideoFilter.lock();
videoFilter = baseHardVideoFilter;
lockVideoFilter.unlock();
}
@Override
public void takeScreenShot(RESScreenShotListener listener) {
synchronized (syncResScreenShotListener) {
resScreenShotListener = listener;
}
}
public void setMirror(boolean isEnableMirror,boolean isEnablePreviewMirror,boolean isEnableStreamMirror) {
this.isEnableMirror = isEnableMirror;
this.isEnablePreviewMirror = isEnablePreviewMirror;
this.isEnableStreamMirror = isEnableStreamMirror;
}
@Override
public void setVideoChangeListener(RESVideoChangeListener listener) {
synchronized (syncResVideoChangeListener) {
resVideoChangeListener = listener;
}
}
@Override
public float getDrawFrameRate() {
synchronized (syncOp) {
return videoGLHander == null ? 0 : videoGLHander.getDrawFrameRate();
}
}
private class VideoGLHandler extends Handler {
static final int WHAT_INIT = 0x001;
static final int WHAT_UNINIT = 0x002;
static final int WHAT_FRAME = 0x003;
static final int WHAT_DRAW = 0x004;
static final int WHAT_RESET_VIDEO = 0x005;
static final int WHAT_START_PREVIEW = 0x010;
static final int WHAT_STOP_PREVIEW = 0x020;
static final int WHAT_START_STREAMING = 0x100;
static final int WHAT_STOP_STREAMING = 0x200;
static final int WHAT_RESET_BITRATE = 0x300;
private Size screenSize;
//=========================
public static final int FILTER_LOCK_TOLERATION = 3;//3ms
private final Object syncFrameNum = new Object();
private int frameNum = 0;
//gl stuff
private final Object syncCameraTex = new Object();
private SurfaceTexture cameraTexture;
private SurfaceTexture screenTexture;
private MediaCodecGLWapper mediaCodecGLWapper;
private ScreenGLWapper screenGLWapper;
private OffScreenGLWapper offScreenGLWapper;
private int sample2DFrameBuffer;
private int sample2DFrameBufferTexture;
private int frameBuffer;
private int frameBufferTexture;
private FloatBuffer shapeVerticesBuffer;
private FloatBuffer mediaCodecTextureVerticesBuffer;
private FloatBuffer screenTextureVerticesBuffer;
private int currCamera;
private final Object syncCameraTextureVerticesBuffer = new Object();
private FloatBuffer camera2dTextureVerticesBuffer;
private FloatBuffer cameraTextureVerticesBuffer;
private ShortBuffer drawIndecesBuffer;
private BaseHardVideoFilter innerVideoFilter = null;
private RESFrameRateMeter drawFrameRateMeter;
private int directionFlag;
//sender
private VideoSenderThread videoSenderThread;
boolean hasNewFrame = false;
public boolean dropNextFrame = false;
float[] textureMatrix;
public VideoGLHandler(Looper looper) {
super(looper);
screenGLWapper = null;
mediaCodecGLWapper = null;
drawFrameRateMeter = new RESFrameRateMeter();
screenSize = new Size(1, 1);
initBuffer();
}
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case WHAT_FRAME: {
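                    // Drain every pending camera frame (one updateTexImage per queued frame) and
                    // sample the newest one into the sample2D framebuffer; dropNextFrame suppresses
                    // the first frame after a camera switch.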
GLHelper.makeCurrent(offScreenGLWapper);
synchronized (syncFrameNum) {
synchronized (syncCameraTex) {
if (cameraTexture != null) {
while (frameNum != 0) {
cameraTexture.updateTexImage();
--frameNum;
if (!dropNextFrame) {
hasNewFrame = true;
} else {
dropNextFrame = false;
hasNewFrame=false;
}
}
} else {
break;
}
}
}
drawSample2DFrameBuffer(cameraTexture);
}
break;
case WHAT_DRAW: {
long time = (Long) msg.obj;
long interval = time + loopingInterval - SystemClock.uptimeMillis();
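                    // Self-scheduling draw loop: re-post WHAT_DRAW so drawing keeps running at
                    // roughly loopingInterval ms for as long as preview or streaming is active.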
synchronized (syncIsLooping) {
if (isPreviewing || isStreaming) {
if (interval > 0) {
videoGLHander.sendMessageDelayed(videoGLHander.obtainMessage(
VideoGLHandler.WHAT_DRAW,
SystemClock.uptimeMillis() + interval),
interval);
} else {
videoGLHander.sendMessage(videoGLHander.obtainMessage(
VideoGLHandler.WHAT_DRAW,
SystemClock.uptimeMillis() + loopingInterval));
}
}
}
if (hasNewFrame) {
drawFrameBuffer();
drawMediaCodec(time * 1000000);
drawScreen();
                        encoderMp4(frameBufferTexture);//encode MP4
drawFrameRateMeter.count();
hasNewFrame = false;
}
}
break;
case WHAT_INIT: {
initOffScreenGL();
}
break;
case WHAT_UNINIT: {
lockVideoFilter.lock();
if (innerVideoFilter != null) {
innerVideoFilter.onDestroy();
innerVideoFilter = null;
}
lockVideoFilter.unlock();
uninitOffScreenGL();
}
break;
case WHAT_START_PREVIEW: {
initScreenGL((SurfaceTexture) msg.obj);
updatePreview(msg.arg1, msg.arg2);
}
break;
case WHAT_STOP_PREVIEW: {
uninitScreenGL();
boolean releaseTexture = (boolean) msg.obj;
if (releaseTexture) {
screenTexture.release();
screenTexture = null;
}
}
break;
case WHAT_START_STREAMING: {
if (dstVideoEncoder == null) {
dstVideoEncoder = MediaCodecHelper.createHardVideoMediaCodec(resCoreParameters, dstVideoFormat);
if (dstVideoEncoder == null) {
throw new RuntimeException("create Video MediaCodec failed");
}
}
dstVideoEncoder.configure(dstVideoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
initMediaCodecGL(dstVideoEncoder.createInputSurface());
dstVideoEncoder.start();
videoSenderThread = new VideoSenderThread("VideoSenderThread", dstVideoEncoder, (RESFlvDataCollecter) msg.obj);
videoSenderThread.start();
}
break;
case WHAT_STOP_STREAMING: {
videoSenderThread.quit();
try {
videoSenderThread.join();
} catch (InterruptedException e) {
LogTools.trace("RESHardVideoCore,stopStreaming()failed", e);
}
videoSenderThread = null;
uninitMediaCodecGL();
dstVideoEncoder.stop();
dstVideoEncoder.release();
dstVideoEncoder = null;
}
break;
case WHAT_RESET_BITRATE: {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT && mediaCodecGLWapper != null) {
Bundle bitrateBundle = new Bundle();
bitrateBundle.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, msg.arg1);
dstVideoEncoder.setParameters(bitrateBundle);
}
}
break;
case WHAT_RESET_VIDEO: {
RESCoreParameters newParameters = (RESCoreParameters) msg.obj;
resCoreParameters.videoWidth = newParameters.videoWidth;
resCoreParameters.videoHeight = newParameters.videoHeight;
resCoreParameters.cropRatio = newParameters.cropRatio;
updateCameraIndex(currCamera);
resetFrameBuff();
if (mediaCodecGLWapper != null) {
uninitMediaCodecGL();
dstVideoEncoder.stop();
dstVideoEncoder.release();
dstVideoEncoder = MediaCodecHelper.createHardVideoMediaCodec(resCoreParameters, dstVideoFormat);
if (dstVideoEncoder == null) {
throw new RuntimeException("create Video MediaCodec failed");
}
dstVideoEncoder.configure(dstVideoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
initMediaCodecGL(dstVideoEncoder.createInputSurface());
dstVideoEncoder.start();
videoSenderThread.updateMediaCodec(dstVideoEncoder);
}
synchronized (syncResVideoChangeListener) {
if(resVideoChangeListener!=null) {
CallbackDelivery.i().post(new RESVideoChangeListener.RESVideoChangeRunable(resVideoChangeListener,
resCoreParameters.videoWidth,
resCoreParameters.videoHeight));
}
}
}
break;
default:
}
}
private void drawSample2DFrameBuffer(SurfaceTexture cameraTexture) {
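            // Sample the external OES camera texture into the sample2D framebuffer, applying the
            // SurfaceTexture transform matrix; when mirroring is enabled, also refresh the
            // preview/stream texture coordinates used later by drawScreen()/drawMediaCodec().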
if(isEnableMirror){
screenTextureVerticesBuffer = GLHelper.adjustTextureFlip(isEnablePreviewMirror);
mediaCodecTextureVerticesBuffer = GLHelper.adjustTextureFlip(isEnableStreamMirror);
}
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, sample2DFrameBuffer);
GLES20.glUseProgram(offScreenGLWapper.cam2dProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, OVERWATCH_TEXTURE_ID);
GLES20.glUniform1i(offScreenGLWapper.cam2dTextureLoc, 0);
synchronized (syncCameraTextureVerticesBuffer) {
GLHelper.enableVertex(offScreenGLWapper.cam2dPostionLoc, offScreenGLWapper.cam2dTextureCoordLoc,
shapeVerticesBuffer, camera2dTextureVerticesBuffer);
}
textureMatrix = new float[16];
cameraTexture.getTransformMatrix(textureMatrix);
//encoder mp4 start
//processStMatrix(textureMatrix, mCameraID == Camera.CameraInfo.CAMERA_FACING_FRONT);
//encoder mp4 end
GLES20.glUniformMatrix4fv(offScreenGLWapper.cam2dTextureMatrix, 1, false, textureMatrix, 0);
GLES20.glViewport(0, 0, resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);//resCoreParameters.videoWidth, resCoreParameters.videoHeight
doGLDraw();
GLES20.glFinish();
GLHelper.disableVertex(offScreenGLWapper.cam2dPostionLoc, offScreenGLWapper.cam2dTextureCoordLoc);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
GLES20.glUseProgram(0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
private void drawOriginFrameBuffer() {
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer);
GLES20.glUseProgram(offScreenGLWapper.camProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, sample2DFrameBufferTexture);
GLES20.glUniform1i(offScreenGLWapper.camTextureLoc, 0);
synchronized (syncCameraTextureVerticesBuffer) {
GLHelper.enableVertex(offScreenGLWapper.camPostionLoc, offScreenGLWapper.camTextureCoordLoc,
shapeVerticesBuffer, cameraTextureVerticesBuffer);
}
GLES20.glViewport(0, 0, resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);
doGLDraw();
GLES20.glFinish();
GLHelper.disableVertex(offScreenGLWapper.camPostionLoc, offScreenGLWapper.camTextureCoordLoc);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glUseProgram(0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
private void drawFrameBuffer() {
GLHelper.makeCurrent(offScreenGLWapper);
boolean isFilterLocked = lockVideoFilter();
long starttime = System.currentTimeMillis();
if (isFilterLocked) {
if (videoFilter != innerVideoFilter) {
if (innerVideoFilter != null) {
innerVideoFilter.onDestroy();
}
innerVideoFilter = videoFilter;
if (innerVideoFilter != null) {
innerVideoFilter.onInit(resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);//resCoreParameters.videoWidth, resCoreParameters.videoHeight
}
}
if (innerVideoFilter != null) {
synchronized (syncCameraTextureVerticesBuffer) {
innerVideoFilter.onDirectionUpdate(directionFlag);
innerVideoFilter.onDraw(sample2DFrameBufferTexture, frameBuffer, shapeVerticesBuffer, cameraTextureVerticesBuffer);
}
} else {
drawOriginFrameBuffer();
}
unlockVideoFilter();
} else {
drawOriginFrameBuffer();
}
LogTools.e("滤镜耗时:"+(System.currentTimeMillis()-starttime));
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer);
checkScreenShot();
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
private void drawMediaCodec(long currTime) {
if (mediaCodecGLWapper != null) {
GLHelper.makeCurrent(mediaCodecGLWapper);
GLES20.glUseProgram(mediaCodecGLWapper.drawProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameBufferTexture);
GLES20.glUniform1i(mediaCodecGLWapper.drawTextureLoc, 0);
GLHelper.enableVertex(mediaCodecGLWapper.drawPostionLoc, mediaCodecGLWapper.drawTextureCoordLoc,
shapeVerticesBuffer, mediaCodecTextureVerticesBuffer);
doGLDraw();
GLES20.glFinish();
GLHelper.disableVertex(mediaCodecGLWapper.drawPostionLoc, mediaCodecGLWapper.drawTextureCoordLoc);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glUseProgram(0);
EGLExt.eglPresentationTimeANDROID(mediaCodecGLWapper.eglDisplay, mediaCodecGLWapper.eglSurface, currTime);
if (!EGL14.eglSwapBuffers(mediaCodecGLWapper.eglDisplay, mediaCodecGLWapper.eglSurface)) {
throw new RuntimeException("eglSwapBuffers,failed!");
}
}
}
private void drawScreen() {
if (screenGLWapper != null) {
GLHelper.makeCurrent(screenGLWapper);
GLES20.glUseProgram(screenGLWapper.drawProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameBufferTexture);
GLES20.glUniform1i(screenGLWapper.drawTextureLoc, 0);
GLHelper.enableVertex(screenGLWapper.drawPostionLoc, screenGLWapper.drawTextureCoordLoc,
shapeVerticesBuffer, screenTextureVerticesBuffer);
GLES20.glViewport(0, 0, screenSize.getWidth(), screenSize.getHeight());
doGLDraw();
GLES20.glFinish();
GLHelper.disableVertex(screenGLWapper.drawPostionLoc, screenGLWapper.drawTextureCoordLoc);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glUseProgram(0);
if (!EGL14.eglSwapBuffers(screenGLWapper.eglDisplay, screenGLWapper.eglSurface)) {
throw new RuntimeException("eglSwapBuffers,failed!");
}
}
}
private void doGLDraw() {
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer);
}
/**
         * @return true if the video filter lock was acquired within FILTER_LOCK_TOLERATION ms
*/
private boolean lockVideoFilter() {
try {
return lockVideoFilter.tryLock(FILTER_LOCK_TOLERATION, TimeUnit.MILLISECONDS);
} catch (InterruptedException e) {
return false;
}
}
private void unlockVideoFilter() {
lockVideoFilter.unlock();
}
private void checkScreenShot() {
synchronized (syncResScreenShotListener) {
if (resScreenShotListener != null) {
Bitmap result = null;
try {
IntBuffer pixBuffer = IntBuffer.allocate(resCoreParameters.previewVideoHeight * resCoreParameters.previewVideoWidth);
GLES20.glReadPixels(0, 0, resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixBuffer);
int[] glPixel = pixBuffer.array();
int[] argbPixel = new int[resCoreParameters.previewVideoHeight * resCoreParameters.previewVideoWidth];
ColorHelper.FIXGLPIXEL(glPixel, argbPixel,
resCoreParameters.previewVideoHeight,
resCoreParameters.previewVideoWidth
);
result = Bitmap.createBitmap(argbPixel,
resCoreParameters.previewVideoHeight,
resCoreParameters.previewVideoWidth,
Bitmap.Config.ARGB_8888);
if(isEnableMirror && isEnablePreviewMirror){
Matrix mx = new Matrix();
                            mx.setScale(-1, 1); //mirror horizontally
result = Bitmap.createBitmap(result,0,0,result.getWidth(),result.getHeight(),mx,true);
}
System.out.println("resCoreParameters.previewVideoWidth = " + resCoreParameters.previewVideoWidth);
System.out.println("resCoreParameters.previewVideoHeight = " + resCoreParameters.previewVideoHeight);
} catch (Exception e) {
LogTools.trace("takescreenshot failed:", e);
} finally {
CallbackDelivery.i().post(new RESScreenShotListener.RESScreenShotListenerRunable(resScreenShotListener, result));
resScreenShotListener = null;
}
}
}
}
private void initOffScreenGL() {
if (offScreenGLWapper == null) {
offScreenGLWapper = new OffScreenGLWapper();
GLHelper.initOffScreenGL(offScreenGLWapper);
GLHelper.makeCurrent(offScreenGLWapper);
//camera
offScreenGLWapper.camProgram = GLHelper.createCameraProgram();
GLES20.glUseProgram(offScreenGLWapper.camProgram);
offScreenGLWapper.camTextureLoc = GLES20.glGetUniformLocation(offScreenGLWapper.camProgram, "uTexture");
offScreenGLWapper.camPostionLoc = GLES20.glGetAttribLocation(offScreenGLWapper.camProgram, "aPosition");
offScreenGLWapper.camTextureCoordLoc = GLES20.glGetAttribLocation(offScreenGLWapper.camProgram, "aTextureCoord");
//camera2d
offScreenGLWapper.cam2dProgram = GLHelper.createCamera2DProgram();
GLES20.glUseProgram(offScreenGLWapper.cam2dProgram);
offScreenGLWapper.cam2dTextureLoc = GLES20.glGetUniformLocation(offScreenGLWapper.cam2dProgram, "uTexture");
offScreenGLWapper.cam2dPostionLoc = GLES20.glGetAttribLocation(offScreenGLWapper.cam2dProgram, "aPosition");
offScreenGLWapper.cam2dTextureCoordLoc = GLES20.glGetAttribLocation(offScreenGLWapper.cam2dProgram, "aTextureCoord");
offScreenGLWapper.cam2dTextureMatrix = GLES20.glGetUniformLocation(offScreenGLWapper.cam2dProgram, "uTextureMatrix");
int[] fb = new int[1], fbt = new int[1];
GLHelper.createCamFrameBuff(fb, fbt, resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);//resCoreParameters.videoWidth, resCoreParameters.videoHeight
sample2DFrameBuffer = fb[0];
sample2DFrameBufferTexture = fbt[0];
GLHelper.createCamFrameBuff(fb, fbt, resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);//resCoreParameters.videoWidth, resCoreParameters.videoHeight
frameBuffer = fb[0];
frameBufferTexture = fbt[0];
} else {
throw new IllegalStateException("initOffScreenGL without uninitOffScreenGL");
}
}
private void uninitOffScreenGL() {
if (offScreenGLWapper != null) {
GLHelper.makeCurrent(offScreenGLWapper);
GLES20.glDeleteProgram(offScreenGLWapper.camProgram);
GLES20.glDeleteProgram(offScreenGLWapper.cam2dProgram);
GLES20.glDeleteFramebuffers(1, new int[]{frameBuffer}, 0);
GLES20.glDeleteTextures(1, new int[]{frameBufferTexture}, 0);
GLES20.glDeleteFramebuffers(1, new int[]{sample2DFrameBuffer}, 0);
GLES20.glDeleteTextures(1, new int[]{sample2DFrameBufferTexture}, 0);
EGL14.eglDestroySurface(offScreenGLWapper.eglDisplay, offScreenGLWapper.eglSurface);
EGL14.eglDestroyContext(offScreenGLWapper.eglDisplay, offScreenGLWapper.eglContext);
EGL14.eglTerminate(offScreenGLWapper.eglDisplay);
EGL14.eglMakeCurrent(offScreenGLWapper.eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
} else {
throw new IllegalStateException("uninitOffScreenGL without initOffScreenGL");
}
}
private void initScreenGL(SurfaceTexture screenSurfaceTexture) {
if (screenGLWapper == null) {
screenTexture = screenSurfaceTexture;
screenGLWapper = new ScreenGLWapper();
GLHelper.initScreenGL(screenGLWapper, offScreenGLWapper.eglContext, screenSurfaceTexture);
GLHelper.makeCurrent(screenGLWapper);
screenGLWapper.drawProgram = GLHelper.createScreenProgram();
GLES20.glUseProgram(screenGLWapper.drawProgram);
screenGLWapper.drawTextureLoc = GLES20.glGetUniformLocation(screenGLWapper.drawProgram, "uTexture");
screenGLWapper.drawPostionLoc = GLES20.glGetAttribLocation(screenGLWapper.drawProgram, "aPosition");
screenGLWapper.drawTextureCoordLoc = GLES20.glGetAttribLocation(screenGLWapper.drawProgram, "aTextureCoord");
} else {
throw new IllegalStateException("initScreenGL without unInitScreenGL");
}
}
private void uninitScreenGL() {
if (screenGLWapper != null) {
GLHelper.makeCurrent(screenGLWapper);
GLES20.glDeleteProgram(screenGLWapper.drawProgram);
EGL14.eglDestroySurface(screenGLWapper.eglDisplay, screenGLWapper.eglSurface);
EGL14.eglDestroyContext(screenGLWapper.eglDisplay, screenGLWapper.eglContext);
EGL14.eglTerminate(screenGLWapper.eglDisplay);
EGL14.eglMakeCurrent(screenGLWapper.eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
screenGLWapper = null;
} else {
throw new IllegalStateException("unInitScreenGL without initScreenGL");
}
}
private void initMediaCodecGL(Surface mediacodecSurface) {
if (mediaCodecGLWapper == null) {
mediaCodecGLWapper = new MediaCodecGLWapper();
GLHelper.initMediaCodecGL(mediaCodecGLWapper, offScreenGLWapper.eglContext, mediacodecSurface);
GLHelper.makeCurrent(mediaCodecGLWapper);
GLES20.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
mediaCodecGLWapper.drawProgram = GLHelper.createMediaCodecProgram();
GLES20.glUseProgram(mediaCodecGLWapper.drawProgram);
mediaCodecGLWapper.drawTextureLoc = GLES20.glGetUniformLocation(mediaCodecGLWapper.drawProgram, "uTexture");
mediaCodecGLWapper.drawPostionLoc = GLES20.glGetAttribLocation(mediaCodecGLWapper.drawProgram, "aPosition");
mediaCodecGLWapper.drawTextureCoordLoc = GLES20.glGetAttribLocation(mediaCodecGLWapper.drawProgram, "aTextureCoord");
} else {
throw new IllegalStateException("initMediaCodecGL without uninitMediaCodecGL");
}
}
private void uninitMediaCodecGL() {
if (mediaCodecGLWapper != null) {
GLHelper.makeCurrent(mediaCodecGLWapper);
GLES20.glDeleteProgram(mediaCodecGLWapper.drawProgram);
EGL14.eglDestroySurface(mediaCodecGLWapper.eglDisplay, mediaCodecGLWapper.eglSurface);
EGL14.eglDestroyContext(mediaCodecGLWapper.eglDisplay, mediaCodecGLWapper.eglContext);
EGL14.eglTerminate(mediaCodecGLWapper.eglDisplay);
EGL14.eglMakeCurrent(mediaCodecGLWapper.eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
mediaCodecGLWapper = null;
} else {
throw new IllegalStateException("uninitMediaCodecGL without initMediaCodecGL");
}
}
private void resetFrameBuff() {
GLHelper.makeCurrent(offScreenGLWapper);
GLES20.glDeleteFramebuffers(1, new int[]{frameBuffer}, 0);
GLES20.glDeleteTextures(1, new int[]{frameBufferTexture}, 0);
GLES20.glDeleteFramebuffers(1, new int[]{sample2DFrameBuffer}, 0);
GLES20.glDeleteTextures(1, new int[]{sample2DFrameBufferTexture}, 0);
int[] fb = new int[1], fbt = new int[1];
GLHelper.createCamFrameBuff(fb, fbt, resCoreParameters.videoWidth, resCoreParameters.videoHeight);
sample2DFrameBuffer = fb[0];
sample2DFrameBufferTexture = fbt[0];
GLHelper.createCamFrameBuff(fb, fbt, resCoreParameters.videoWidth, resCoreParameters.videoHeight);
frameBuffer = fb[0];
frameBufferTexture = fbt[0];
}
private void initBuffer() {
shapeVerticesBuffer = GLHelper.getShapeVerticesBuffer();
mediaCodecTextureVerticesBuffer = GLHelper.getMediaCodecTextureVerticesBuffer();
screenTextureVerticesBuffer = GLHelper.getScreenTextureVerticesBuffer();
updateCameraIndex(currCamera);
drawIndecesBuffer = GLHelper.getDrawIndecesBuffer();
cameraTextureVerticesBuffer = GLHelper.getCameraTextureVerticesBuffer();
}
public void updateCameraIndex(int cameraIndex) {
synchronized (syncCameraTextureVerticesBuffer) {
currCamera = cameraIndex;
if (currCamera == Camera.CameraInfo.CAMERA_FACING_FRONT) {
directionFlag = resCoreParameters.frontCameraDirectionMode ^ RESConfig.DirectionMode.FLAG_DIRECTION_FLIP_HORIZONTAL;
} else {
directionFlag = resCoreParameters.backCameraDirectionMode ^ RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_0;
}
camera2dTextureVerticesBuffer = GLHelper.getCamera2DTextureVerticesBuffer(directionFlag, resCoreParameters.cropRatio);
}
}
public float getDrawFrameRate() {
return drawFrameRateMeter.getFps();
}
public void updateCamTexture(SurfaceTexture surfaceTexture) {
synchronized (syncCameraTex) {
if (surfaceTexture != cameraTexture) {
cameraTexture = surfaceTexture;
frameNum = 0;
dropNextFrame = true;
}
}
}
public void addFrameNum() {
synchronized (syncFrameNum) {
++frameNum;
this.removeMessages(WHAT_FRAME);
this.sendMessageAtFrontOfQueue(this.obtainMessage(VideoGLHandler.WHAT_FRAME));
}
}
public void updatePreview(int w, int h) {
screenSize = new Size(w, h);
}
public int getBufferTexture(){
return frameBufferTexture;
}
private void encoderMp4(int BufferTexture) {
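            // Hand the current frame to the optional MP4 recorder (MediaVideoEncoder): mirror the
            // transform matrix for the front camera, (re)attach the EGL context if needed, then
            // signal that a frame is available for encoding.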
synchronized (this) {
if (mVideoEncoder != null) {
processStMatrix(textureMatrix, mCameraId == Camera.CameraInfo.CAMERA_FACING_FRONT);
if (mNeedResetEglContext) {
mVideoEncoder.setEglContext(EGL14.eglGetCurrentContext(), videoGLHander.getBufferTexture());
mNeedResetEglContext = false;
}
mVideoEncoder.setPreviewWH(resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);
mVideoEncoder.frameAvailableSoon(textureMatrix, mVideoEncoder.getMvpMatrix());
}
}
}
}
//encoder mp4 start
private MediaVideoEncoder mVideoEncoder;
private boolean mNeedResetEglContext = true;
private int mCameraId = -1;
public void setVideoEncoder(final MediaVideoEncoder encoder) {
synchronized (this) {
if (encoder != null) {
encoder.setEglContext(EGL14.eglGetCurrentContext(), videoGLHander.getBufferTexture());
}
mVideoEncoder = encoder;
}
}
private void processStMatrix(float[] matrix, boolean needMirror) {
if (needMirror && matrix != null && matrix.length == 16) {
for (int i = 0; i < 3; i++) {
matrix[4 * i] = -matrix[4 * i];
}
if (matrix[4 * 3] == 0) {
matrix[4 * 3] = 1.0f;
} else if (matrix[4 * 3] == 1.0f) {
matrix[4 * 3] = 0f;
}
}
return;
}
public void setNeedResetEglContext(boolean bol){
mNeedResetEglContext = bol;
}
//encoder mp4 end
}

View File

@@ -0,0 +1,232 @@
package me.lake.librestreaming.core;
import android.media.AudioFormat;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.SystemClock;
import java.nio.ByteBuffer;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import me.lake.librestreaming.filter.softaudiofilter.BaseSoftAudioFilter;
import me.lake.librestreaming.model.RESAudioBuff;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.tools.LogTools;
public class RESSoftAudioCore {
RESCoreParameters resCoreParameters;
private final Object syncOp = new Object();
private MediaCodec dstAudioEncoder;
private MediaFormat dstAudioFormat;
//filter
private Lock lockAudioFilter = null;
private BaseSoftAudioFilter audioFilter;
//AudioBuffs
//buffers to handle buff from queueAudio
private RESAudioBuff[] orignAudioBuffs;
private int lastAudioQueueBuffIndex;
//buffer to handle buff from orignAudioBuffs
private RESAudioBuff orignAudioBuff;
private RESAudioBuff filteredAudioBuff;
private AudioFilterHandler audioFilterHandler;
private HandlerThread audioFilterHandlerThread;
private AudioSenderThread audioSenderThread;
public RESSoftAudioCore(RESCoreParameters parameters) {
resCoreParameters = parameters;
lockAudioFilter = new ReentrantLock(false);
}
public void queueAudio(byte[] rawAudioFrame) {
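        // Ring-buffer style queue: the raw frame is copied into the next slot only if that slot
        // has already been consumed by the filter handler; otherwise the frame is dropped.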
int targetIndex = (lastAudioQueueBuffIndex + 1) % orignAudioBuffs.length;
if (orignAudioBuffs[targetIndex].isReadyToFill) {
LogTools.d("queueAudio,accept ,targetIndex" + targetIndex);
System.arraycopy(rawAudioFrame, 0, orignAudioBuffs[targetIndex].buff, 0, resCoreParameters.audioRecoderBufferSize);
orignAudioBuffs[targetIndex].isReadyToFill = false;
lastAudioQueueBuffIndex = targetIndex;
audioFilterHandler.sendMessage(audioFilterHandler.obtainMessage(AudioFilterHandler.WHAT_INCOMING_BUFF, targetIndex, 0));
} else {
LogTools.d("queueAudio,abandon,targetIndex" + targetIndex);
}
}
public boolean prepare(RESConfig resConfig) {
synchronized (syncOp) {
resCoreParameters.mediacodecAACProfile = MediaCodecInfo.CodecProfileLevel.AACObjectLC;
resCoreParameters.mediacodecAACSampleRate = 44100;
resCoreParameters.mediacodecAACChannelCount = 1;
resCoreParameters.mediacodecAACBitRate = 32 * 1024;
resCoreParameters.mediacodecAACMaxInputSize = 8820;
dstAudioFormat = new MediaFormat();
dstAudioEncoder = MediaCodecHelper.createAudioMediaCodec(resCoreParameters, dstAudioFormat);
if (dstAudioEncoder == null) {
LogTools.e("create Audio MediaCodec failed");
return false;
}
//audio
//44100/10=4410,4410*2 = 8820
int audioQueueNum = resCoreParameters.audioBufferQueueNum;
int orignAudioBuffSize = resCoreParameters.mediacodecAACSampleRate / 5;
orignAudioBuffs = new RESAudioBuff[audioQueueNum];
for (int i = 0; i < audioQueueNum; i++) {
orignAudioBuffs[i] = new RESAudioBuff(AudioFormat.ENCODING_PCM_16BIT, orignAudioBuffSize);
}
orignAudioBuff = new RESAudioBuff(AudioFormat.ENCODING_PCM_16BIT, orignAudioBuffSize);
filteredAudioBuff = new RESAudioBuff(AudioFormat.ENCODING_PCM_16BIT, orignAudioBuffSize);
return true;
}
}
public void start(RESFlvDataCollecter flvDataCollecter) {
synchronized (syncOp) {
try {
for (RESAudioBuff buff : orignAudioBuffs) {
buff.isReadyToFill = true;
}
if (dstAudioEncoder == null) {
dstAudioEncoder = MediaCodec.createEncoderByType(dstAudioFormat.getString(MediaFormat.KEY_MIME));
}
dstAudioEncoder.configure(dstAudioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
dstAudioEncoder.start();
lastAudioQueueBuffIndex = 0;
audioFilterHandlerThread = new HandlerThread("audioFilterHandlerThread");
audioSenderThread = new AudioSenderThread("AudioSenderThread", dstAudioEncoder, flvDataCollecter);
audioFilterHandlerThread.start();
audioSenderThread.start();
audioFilterHandler = new AudioFilterHandler(audioFilterHandlerThread.getLooper());
} catch (Exception e) {
LogTools.trace("RESSoftAudioCore", e);
}
}
}
public void stop() {
synchronized (syncOp) {
audioFilterHandler.removeCallbacksAndMessages(null);
audioFilterHandlerThread.quit();
try {
audioFilterHandlerThread.join();
audioSenderThread.quit();
audioSenderThread.join();
} catch (InterruptedException e) {
LogTools.trace("RESSoftAudioCore", e);
}
dstAudioEncoder.stop();
dstAudioEncoder.release();
dstAudioEncoder = null;
}
}
public BaseSoftAudioFilter acquireAudioFilter() {
lockAudioFilter.lock();
return audioFilter;
}
public void releaseAudioFilter() {
lockAudioFilter.unlock();
}
public void setAudioFilter(BaseSoftAudioFilter baseSoftAudioFilter) {
lockAudioFilter.lock();
if (audioFilter != null) {
audioFilter.onDestroy();
}
audioFilter = baseSoftAudioFilter;
if (audioFilter != null) {
audioFilter.onInit(resCoreParameters.mediacodecAACSampleRate / 5);
}
lockAudioFilter.unlock();
}
public void destroy() {
synchronized (syncOp) {
lockAudioFilter.lock();
if (audioFilter != null) {
audioFilter.onDestroy();
}
lockAudioFilter.unlock();
}
}
private class AudioFilterHandler extends Handler {
public static final int FILTER_LOCK_TOLERATION = 3;//3ms
public static final int WHAT_INCOMING_BUFF = 1;
private int sequenceNum;
AudioFilterHandler(Looper looper) {
super(looper);
sequenceNum = 0;
}
@Override
public void handleMessage(Message msg) {
if (msg.what != WHAT_INCOMING_BUFF) {
return;
}
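            // Copy the queued PCM so the slot can be reused, run the soft audio filter if one is
            // set and its lock can be taken within FILTER_LOCK_TOLERATION ms, then feed the
            // (possibly filtered) PCM to the AAC encoder with a microsecond timestamp.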
sequenceNum++;
int targetIndex = msg.arg1;
long nowTimeMs = SystemClock.uptimeMillis();
System.arraycopy(orignAudioBuffs[targetIndex].buff, 0,
orignAudioBuff.buff, 0, orignAudioBuff.buff.length);
orignAudioBuffs[targetIndex].isReadyToFill = true;
boolean isFilterLocked = lockAudioFilter();
boolean filtered = false;
if (isFilterLocked) {
filtered = audioFilter.onFrame(orignAudioBuff.buff, filteredAudioBuff.buff, nowTimeMs, sequenceNum);
unlockAudioFilter();
} else {
System.arraycopy(orignAudioBuffs[targetIndex].buff, 0,
orignAudioBuff.buff, 0, orignAudioBuff.buff.length);
orignAudioBuffs[targetIndex].isReadyToFill = true;
}
//orignAudioBuff is ready
int eibIndex = dstAudioEncoder.dequeueInputBuffer(-1);
if (eibIndex >= 0) {
ByteBuffer dstAudioEncoderIBuffer = dstAudioEncoder.getInputBuffers()[eibIndex];
dstAudioEncoderIBuffer.position(0);
dstAudioEncoderIBuffer.put(filtered?filteredAudioBuff.buff:orignAudioBuff.buff, 0, orignAudioBuff.buff.length);
dstAudioEncoder.queueInputBuffer(eibIndex, 0, orignAudioBuff.buff.length, nowTimeMs * 1000, 0);
} else {
LogTools.d("dstAudioEncoder.dequeueInputBuffer(-1)<0");
}
LogTools.d("AudioFilterHandler,ProcessTime:" + (System.currentTimeMillis() - nowTimeMs));
}
/**
         * @return true if the audio filter lock was acquired and audioFilter is not null
*/
private boolean lockAudioFilter() {
try {
boolean locked = lockAudioFilter.tryLock(FILTER_LOCK_TOLERATION, TimeUnit.MILLISECONDS);
if (locked) {
if (audioFilter != null) {
return true;
} else {
lockAudioFilter.unlock();
return false;
}
} else {
return false;
}
} catch (InterruptedException e) {
}
return false;
}
private void unlockAudioFilter() {
lockAudioFilter.unlock();
}
}
}

View File

@@ -0,0 +1,555 @@
package me.lake.librestreaming.core;
import android.annotation.TargetApi;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.SystemClock;
import java.nio.ByteBuffer;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import me.lake.librestreaming.client.CallbackDelivery;
import me.lake.librestreaming.core.listener.RESScreenShotListener;
import me.lake.librestreaming.core.listener.RESVideoChangeListener;
import me.lake.librestreaming.encoder.MediaVideoEncoder;
import me.lake.librestreaming.filter.softvideofilter.BaseSoftVideoFilter;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.RESVideoBuff;
import me.lake.librestreaming.render.GLESRender;
import me.lake.librestreaming.render.IRender;
import me.lake.librestreaming.render.NativeRender;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.tools.BuffSizeCalculator;
import me.lake.librestreaming.tools.LogTools;
public class RESSoftVideoCore implements RESVideoCore {
RESCoreParameters resCoreParameters;
private final Object syncOp = new Object();
private SurfaceTexture cameraTexture;
private int currentCamera;
private MediaCodec dstVideoEncoder;
private boolean isEncoderStarted;
private final Object syncDstVideoEncoder = new Object();
private MediaFormat dstVideoFormat;
//render
private final Object syncPreview = new Object();
private IRender previewRender;
//filter
private Lock lockVideoFilter = null;
private BaseSoftVideoFilter videoFilter;
private VideoFilterHandler videoFilterHandler;
private HandlerThread videoFilterHandlerThread;
//sender
private VideoSenderThread videoSenderThread;
//VideoBuffs
//buffers to handle buff from queueVideo
private RESVideoBuff[] orignVideoBuffs;
private int lastVideoQueueBuffIndex;
//buffer to convert orignVideoBuff to NV21 if filter are set
private RESVideoBuff orignNV21VideoBuff;
//buffer to handle filtered color from filter if filter are set
private RESVideoBuff filteredNV21VideoBuff;
//buffer to convert other color format to suitable color format for dstVideoEncoder if nessesary
private RESVideoBuff suitable4VideoEncoderBuff;
final private Object syncResScreenShotListener = new Object();
private RESScreenShotListener resScreenShotListener;
private final Object syncIsLooping = new Object();
private boolean isPreviewing = false;
private boolean isStreaming = false;
private int loopingInterval;
public RESSoftVideoCore(RESCoreParameters parameters) {
resCoreParameters = parameters;
lockVideoFilter = new ReentrantLock(false);
videoFilter = null;
}
public void setCurrentCamera(int camIndex) {
if (currentCamera != camIndex) {
synchronized (syncOp) {
if (videoFilterHandler != null) {
videoFilterHandler.removeMessages(VideoFilterHandler.WHAT_INCOMING_BUFF);
}
if (orignVideoBuffs != null) {
for (RESVideoBuff buff : orignVideoBuffs) {
buff.isReadyToFill = true;
}
lastVideoQueueBuffIndex = 0;
}
}
}
currentCamera = camIndex;
}
@Override
public boolean prepare(RESConfig resConfig) {
synchronized (syncOp) {
resCoreParameters.renderingMode = resConfig.getRenderingMode();
resCoreParameters.mediacdoecAVCBitRate = resConfig.getBitRate();
resCoreParameters.videoBufferQueueNum = resConfig.getVideoBufferQueueNum();
resCoreParameters.mediacodecAVCIFrameInterval = resConfig.getVideoGOP();
resCoreParameters.mediacodecAVCFrameRate = resCoreParameters.videoFPS;
loopingInterval = 1000 / resCoreParameters.videoFPS;
dstVideoFormat = new MediaFormat();
synchronized (syncDstVideoEncoder) {
dstVideoEncoder = MediaCodecHelper.createSoftVideoMediaCodec(resCoreParameters, dstVideoFormat);
isEncoderStarted = false;
if (dstVideoEncoder == null) {
LogTools.e("create Video MediaCodec failed");
return false;
}
}
resCoreParameters.previewBufferSize = BuffSizeCalculator.calculator(resCoreParameters.videoWidth,
resCoreParameters.videoHeight, resCoreParameters.previewColorFormat);
//video
int videoWidth = resCoreParameters.videoWidth;
int videoHeight = resCoreParameters.videoHeight;
int videoQueueNum = resCoreParameters.videoBufferQueueNum;
orignVideoBuffs = new RESVideoBuff[videoQueueNum];
for (int i = 0; i < videoQueueNum; i++) {
orignVideoBuffs[i] = new RESVideoBuff(resCoreParameters.previewColorFormat, resCoreParameters.previewBufferSize);
}
lastVideoQueueBuffIndex = 0;
orignNV21VideoBuff = new RESVideoBuff(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
BuffSizeCalculator.calculator(videoWidth, videoHeight, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar));
filteredNV21VideoBuff = new RESVideoBuff(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
BuffSizeCalculator.calculator(videoWidth, videoHeight, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar));
suitable4VideoEncoderBuff = new RESVideoBuff(resCoreParameters.mediacodecAVCColorFormat,
BuffSizeCalculator.calculator(videoWidth, videoHeight, resCoreParameters.mediacodecAVCColorFormat));
videoFilterHandlerThread = new HandlerThread("videoFilterHandlerThread");
videoFilterHandlerThread.start();
videoFilterHandler = new VideoFilterHandler(videoFilterHandlerThread.getLooper());
return true;
}
}
@Override
public boolean startStreaming(RESFlvDataCollecter flvDataCollecter) {
synchronized (syncOp) {
try {
synchronized (syncDstVideoEncoder) {
if (dstVideoEncoder == null) {
dstVideoEncoder = MediaCodec.createEncoderByType(dstVideoFormat.getString(MediaFormat.KEY_MIME));
}
dstVideoEncoder.configure(dstVideoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
dstVideoEncoder.start();
isEncoderStarted = true;
}
videoSenderThread = new VideoSenderThread("VideoSenderThread", dstVideoEncoder, flvDataCollecter);
videoSenderThread.start();
synchronized (syncIsLooping) {
if (!isPreviewing && !isStreaming) {
videoFilterHandler.removeMessages(VideoFilterHandler.WHAT_DRAW);
videoFilterHandler.sendMessageDelayed(videoFilterHandler.obtainMessage(VideoFilterHandler.WHAT_DRAW, SystemClock.uptimeMillis() + loopingInterval), loopingInterval);
}
isStreaming = true;
}
} catch (Exception e) {
LogTools.trace("RESVideoClient.start()failed", e);
return false;
}
return true;
}
}
@Override
public void updateCamTexture(SurfaceTexture camTex) {
}
@Override
public boolean stopStreaming() {
synchronized (syncOp) {
videoSenderThread.quit();
synchronized (syncIsLooping) {
isStreaming = false;
}
try {
videoSenderThread.join();
} catch (InterruptedException e) {
LogTools.trace("RESCore", e);
}
synchronized (syncDstVideoEncoder) {
dstVideoEncoder.stop();
dstVideoEncoder.release();
dstVideoEncoder = null;
isEncoderStarted = false;
}
videoSenderThread = null;
return true;
}
}
@Override
public boolean destroy() {
synchronized (syncOp) {
lockVideoFilter.lock();
if (videoFilter != null) {
videoFilter.onDestroy();
}
lockVideoFilter.unlock();
return true;
}
}
@TargetApi(Build.VERSION_CODES.KITKAT)
@Override
public void reSetVideoBitrate(int bitrate) {
synchronized (syncOp) {
if (videoFilterHandler != null) {
videoFilterHandler.sendMessage(videoFilterHandler.obtainMessage(VideoFilterHandler.WHAT_RESET_BITRATE, bitrate, 0));
resCoreParameters.mediacdoecAVCBitRate = bitrate;
dstVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, resCoreParameters.mediacdoecAVCBitRate);
}
}
}
@TargetApi(Build.VERSION_CODES.KITKAT)
@Override
public int getVideoBitrate() {
synchronized (syncOp) {
return resCoreParameters.mediacdoecAVCBitRate;
}
}
@Override
public void reSetVideoFPS(int fps) {
synchronized (syncOp) {
resCoreParameters.videoFPS = fps;
loopingInterval = 1000 / resCoreParameters.videoFPS;
}
}
@Override
public void reSetVideoSize(RESCoreParameters newParameters) {
}
@Override
public void startPreview(SurfaceTexture surfaceTexture, int visualWidth, int visualHeight) {
synchronized (syncPreview) {
if (previewRender != null) {
throw new RuntimeException("startPreview without destroy previous");
}
switch (resCoreParameters.renderingMode) {
case RESCoreParameters.RENDERING_MODE_NATIVE_WINDOW:
previewRender = new NativeRender();
break;
case RESCoreParameters.RENDERING_MODE_OPENGLES:
previewRender = new GLESRender();
break;
default:
throw new RuntimeException("Unknow rendering mode");
}
previewRender.create(surfaceTexture,
resCoreParameters.previewColorFormat,
resCoreParameters.videoWidth,
resCoreParameters.videoHeight,
visualWidth,
visualHeight);
synchronized (syncIsLooping) {
if (!isPreviewing && !isStreaming) {
videoFilterHandler.removeMessages(VideoFilterHandler.WHAT_DRAW);
videoFilterHandler.sendMessageDelayed(videoFilterHandler.obtainMessage(VideoFilterHandler.WHAT_DRAW, SystemClock.uptimeMillis() + loopingInterval), loopingInterval);
}
isPreviewing = true;
}
}
}
@Override
public void updatePreview(int visualWidth, int visualHeight) {
synchronized (syncPreview) {
if (previewRender == null) {
throw new RuntimeException("updatePreview without startPreview");
}
previewRender.update(visualWidth, visualHeight);
}
}
@Override
public void stopPreview(boolean releaseTexture) {
synchronized (syncPreview) {
if (previewRender == null) {
throw new RuntimeException("stopPreview without startPreview");
}
previewRender.destroy(releaseTexture);
previewRender = null;
synchronized (syncIsLooping) {
isPreviewing = false;
}
}
}
public void queueVideo(byte[] rawVideoFrame) {
synchronized (syncOp) {
int targetIndex = (lastVideoQueueBuffIndex + 1) % orignVideoBuffs.length;
if (orignVideoBuffs[targetIndex].isReadyToFill) {
LogTools.d("queueVideo,accept ,targetIndex" + targetIndex);
acceptVideo(rawVideoFrame, orignVideoBuffs[targetIndex].buff);
orignVideoBuffs[targetIndex].isReadyToFill = false;
lastVideoQueueBuffIndex = targetIndex;
videoFilterHandler.sendMessage(videoFilterHandler.obtainMessage(VideoFilterHandler.WHAT_INCOMING_BUFF, targetIndex, 0));
} else {
LogTools.d("queueVideo,abandon,targetIndex" + targetIndex);
}
}
}
private void acceptVideo(byte[] src, byte[] dst) {
int directionFlag = currentCamera == Camera.CameraInfo.CAMERA_FACING_BACK ? resCoreParameters.backCameraDirectionMode : resCoreParameters.frontCameraDirectionMode;
ColorHelper.NV21Transform(src,
dst,
resCoreParameters.previewVideoWidth,
resCoreParameters.previewVideoHeight,
directionFlag);
}
public BaseSoftVideoFilter acquireVideoFilter() {
lockVideoFilter.lock();
return videoFilter;
}
public void releaseVideoFilter() {
lockVideoFilter.unlock();
}
public void setVideoFilter(BaseSoftVideoFilter baseSoftVideoFilter) {
lockVideoFilter.lock();
if (videoFilter != null) {
videoFilter.onDestroy();
}
videoFilter = baseSoftVideoFilter;
if (videoFilter != null) {
videoFilter.onInit(resCoreParameters.videoWidth, resCoreParameters.videoHeight);
}
lockVideoFilter.unlock();
}
@Override
public void takeScreenShot(RESScreenShotListener listener) {
synchronized (syncResScreenShotListener) {
resScreenShotListener = listener;
}
}
@Override
public void setVideoChangeListener(RESVideoChangeListener listener) {
}
@Override
public float getDrawFrameRate() {
synchronized (syncOp) {
return videoFilterHandler == null ? 0 : videoFilterHandler.getDrawFrameRate();
}
}
//worker handler
private class VideoFilterHandler extends Handler {
public static final int FILTER_LOCK_TOLERATION = 3;//3ms
public static final int WHAT_INCOMING_BUFF = 1;
public static final int WHAT_DRAW = 2;
public static final int WHAT_RESET_BITRATE = 3;
private int sequenceNum;
private RESFrameRateMeter drawFrameRateMeter;
VideoFilterHandler(Looper looper) {
super(looper);
sequenceNum = 0;
drawFrameRateMeter = new RESFrameRateMeter();
}
public float getDrawFrameRate() {
return drawFrameRateMeter.getFps();
}
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case WHAT_INCOMING_BUFF: {
int targetIndex = msg.arg1;
/**
* orignVideoBuffs[targetIndex] is ready
* orignVideoBuffs[targetIndex]->orignNV21VideoBuff
*/
System.arraycopy(orignVideoBuffs[targetIndex].buff, 0,
orignNV21VideoBuff.buff, 0, orignNV21VideoBuff.buff.length);
orignVideoBuffs[targetIndex].isReadyToFill = true;
}
break;
case WHAT_DRAW: {
long time = (Long) msg.obj;
long interval = time + loopingInterval - SystemClock.uptimeMillis();
synchronized (syncIsLooping) {
if (isPreviewing || isStreaming) {
if (interval > 0) {
videoFilterHandler.sendMessageDelayed(videoFilterHandler.obtainMessage(
VideoFilterHandler.WHAT_DRAW,
SystemClock.uptimeMillis() + interval),
interval);
} else {
videoFilterHandler.sendMessage(videoFilterHandler.obtainMessage(
VideoFilterHandler.WHAT_DRAW,
SystemClock.uptimeMillis() + loopingInterval));
}
}
}
sequenceNum++;
long nowTimeMs = SystemClock.uptimeMillis();
boolean isFilterLocked = lockVideoFilter();
if (isFilterLocked) {
boolean modified;
modified = videoFilter.onFrame(orignNV21VideoBuff.buff, filteredNV21VideoBuff.buff, nowTimeMs, sequenceNum);
unlockVideoFilter();
rendering(modified ? filteredNV21VideoBuff.buff : orignNV21VideoBuff.buff);
checkScreenShot(modified ? filteredNV21VideoBuff.buff : orignNV21VideoBuff.buff);
/**
* orignNV21VideoBuff is ready
* orignNV21VideoBuff->suitable4VideoEncoderBuff
*/
if (resCoreParameters.mediacodecAVCColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
ColorHelper.NV21TOYUV420SP(modified ? filteredNV21VideoBuff.buff : orignNV21VideoBuff.buff,
suitable4VideoEncoderBuff.buff, resCoreParameters.videoWidth * resCoreParameters.videoHeight);
} else if (resCoreParameters.mediacodecAVCColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
ColorHelper.NV21TOYUV420P(modified ? filteredNV21VideoBuff.buff : orignNV21VideoBuff.buff,
suitable4VideoEncoderBuff.buff, resCoreParameters.videoWidth * resCoreParameters.videoHeight);
} else {//LAKETODO colorConvert
}
} else {
rendering(orignNV21VideoBuff.buff);
checkScreenShot(orignNV21VideoBuff.buff);
if (resCoreParameters.mediacodecAVCColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
ColorHelper.NV21TOYUV420SP(orignNV21VideoBuff.buff,
suitable4VideoEncoderBuff.buff,
resCoreParameters.videoWidth * resCoreParameters.videoHeight);
} else if (resCoreParameters.mediacodecAVCColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
ColorHelper.NV21TOYUV420P(orignNV21VideoBuff.buff,
suitable4VideoEncoderBuff.buff,
resCoreParameters.videoWidth * resCoreParameters.videoHeight);
}
orignNV21VideoBuff.isReadyToFill = true;
}
drawFrameRateMeter.count();
//suitable4VideoEncoderBuff is ready
synchronized (syncDstVideoEncoder) {
if (dstVideoEncoder != null && isEncoderStarted) {
int eibIndex = dstVideoEncoder.dequeueInputBuffer(-1);
if (eibIndex >= 0) {
ByteBuffer dstVideoEncoderIBuffer = dstVideoEncoder.getInputBuffers()[eibIndex];
dstVideoEncoderIBuffer.position(0);
dstVideoEncoderIBuffer.put(suitable4VideoEncoderBuff.buff, 0, suitable4VideoEncoderBuff.buff.length);
dstVideoEncoder.queueInputBuffer(eibIndex, 0, suitable4VideoEncoderBuff.buff.length, nowTimeMs * 1000, 0);
} else {
LogTools.d("dstVideoEncoder.dequeueInputBuffer(-1)<0");
}
}
}
LogTools.d("VideoFilterHandler,ProcessTime:" + (System.currentTimeMillis() - nowTimeMs));
}
break;
case WHAT_RESET_BITRATE: {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT && dstVideoEncoder != null) {
Bundle bitrateBundle = new Bundle();
bitrateBundle.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, msg.arg1);
dstVideoEncoder.setParameters(bitrateBundle);
}
}
break;
}
}
/**
* render an NV21 frame via the current preview renderer
*
* @param pixel NV21 frame data
*/
private void rendering(byte[] pixel) {
synchronized (syncPreview) {
if (previewRender == null) {
return;
}
previewRender.rendering(pixel);
}
}
/**
* check whether a screenshot listener is registered; if so, convert the NV21 frame to a Bitmap and deliver it
*
* @param pixel NV21 frame data
*/
private void checkScreenShot(byte[] pixel) {
synchronized (syncResScreenShotListener) {
if (resScreenShotListener != null) {
int[] argbPixel = new int[resCoreParameters.videoWidth * resCoreParameters.videoHeight];
ColorHelper.NV21TOARGB(pixel,
argbPixel,
resCoreParameters.videoWidth,
resCoreParameters.videoHeight);
Bitmap result = Bitmap.createBitmap(argbPixel,
resCoreParameters.videoWidth,
resCoreParameters.videoHeight,
Bitmap.Config.ARGB_8888);
CallbackDelivery.i().post(new RESScreenShotListener.RESScreenShotListenerRunable(resScreenShotListener, result));
resScreenShotListener = null;
}
}
}
/**
* @return true if the filter lock was acquired and videoFilter != null
*/
private boolean lockVideoFilter() {
try {
boolean locked = lockVideoFilter.tryLock(FILTER_LOCK_TOLERATION, TimeUnit.MILLISECONDS);
if (locked) {
if (videoFilter != null) {
return true;
} else {
lockVideoFilter.unlock();
return false;
}
} else {
return false;
}
} catch (InterruptedException e) {
}
return false;
}
private void unlockVideoFilter() {
lockVideoFilter.unlock();
}
}
public void setVideoEncoder(final MediaVideoEncoder encoder) {
}
@Override
public void setMirror(boolean isEnableMirror, boolean isEnablePreviewMirror, boolean isEnableStreamMirror) {
}
public void setNeedResetEglContext(boolean bol){
}
}
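A caller-side note on the filter lock (editor's sketch, not part of this commit): acquireVideoFilter() holds lockVideoFilter until releaseVideoFilter() is called, and the WHAT_DRAW handler only tries the lock for FILTER_LOCK_TOLERATION (3 ms), so a caller that holds the lock too long or forgets to release it silently disables filtering rather than deadlocking. A minimal hypothetical usage pattern (videoCore, MyBeautyFilter, and setStrength are illustrative names only):

BaseSoftVideoFilter filter = videoCore.acquireVideoFilter();
try {
    if (filter instanceof MyBeautyFilter) {              // hypothetical filter subclass
        ((MyBeautyFilter) filter).setStrength(0.5f);      // hypothetical parameter
    }
} finally {
    videoCore.releaseVideoFilter();                       // always release, or WHAT_DRAW keeps skipping the filter
}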

View File

@@ -0,0 +1,53 @@
package me.lake.librestreaming.core;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import me.lake.librestreaming.core.listener.RESScreenShotListener;
import me.lake.librestreaming.core.listener.RESVideoChangeListener;
import me.lake.librestreaming.encoder.MediaVideoEncoder;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
public interface RESVideoCore {
int OVERWATCH_TEXTURE_ID = 10;
boolean prepare(RESConfig resConfig);
void updateCamTexture(SurfaceTexture camTex);
void startPreview(SurfaceTexture surfaceTexture, int visualWidth, int visualHeight);
void updatePreview(int visualWidth, int visualHeight);
void stopPreview(boolean releaseTexture);
boolean startStreaming(RESFlvDataCollecter flvDataCollecter);
boolean stopStreaming();
boolean destroy();
void reSetVideoBitrate(int bitrate);
int getVideoBitrate();
void reSetVideoFPS(int fps);
void reSetVideoSize(RESCoreParameters newParameters);
void setCurrentCamera(int cameraIndex);
void takeScreenShot(RESScreenShotListener listener);
void setVideoChangeListener(RESVideoChangeListener listener);
float getDrawFrameRate();
void setVideoEncoder(final MediaVideoEncoder encoder);
void setMirror(boolean isEnableMirror,boolean isEnablePreviewMirror,boolean isEnableStreamMirror);
void setNeedResetEglContext(boolean bol);
}
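The interface implies a call order that RESSoftVideoCore assumes: prepare() before anything else, preview and streaming sharing a single draw loop, and destroy() last. A hedged lifecycle sketch, with coreParameters, config, texture, collecter, and the visual sizes assumed to be supplied by the caller:

RESVideoCore core = new RESSoftVideoCore(coreParameters);
if (core.prepare(config)) {
    core.startPreview(texture, visualWidth, visualHeight);   // starts the WHAT_DRAW loop if idle
    core.startStreaming(collecter);                          // reuses the same loop while previewing
    // ... stream ...
    core.stopStreaming();
    core.stopPreview(true);
    core.destroy();
}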

View File

@@ -0,0 +1,138 @@
package me.lake.librestreaming.core;
import android.media.MediaCodec;
import android.media.MediaFormat;
import java.nio.ByteBuffer;
import me.lake.librestreaming.rtmp.RESFlvData;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.rtmp.RESRtmpSender;
import me.lake.librestreaming.tools.LogTools;
public class VideoSenderThread extends Thread {
private static final long WAIT_TIME = 5000;
private MediaCodec.BufferInfo eInfo;
private long startTime = 0;
private MediaCodec dstVideoEncoder;
private final Object syncDstVideoEncoder = new Object();
private RESFlvDataCollecter dataCollecter;
VideoSenderThread(String name, MediaCodec encoder, RESFlvDataCollecter flvDataCollecter) {
super(name);
eInfo = new MediaCodec.BufferInfo();
startTime = 0;
dstVideoEncoder = encoder;
dataCollecter = flvDataCollecter;
}
public void updateMediaCodec(MediaCodec encoder) {
synchronized (syncDstVideoEncoder) {
dstVideoEncoder = encoder;
}
}
private boolean shouldQuit = false;
void quit() {
shouldQuit = true;
this.interrupt();
}
@Override
public void run() {
while (!shouldQuit) {
synchronized (syncDstVideoEncoder) {
int eobIndex = MediaCodec.INFO_TRY_AGAIN_LATER;
try {
eobIndex = dstVideoEncoder.dequeueOutputBuffer(eInfo, WAIT_TIME);
} catch (Exception ignored) {
}
switch (eobIndex) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
LogTools.d("VideoSenderThread,MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED");
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
// LogTools.d("VideoSenderThread,MediaCodec.INFO_TRY_AGAIN_LATER");
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
LogTools.d("VideoSenderThread,MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:" +
dstVideoEncoder.getOutputFormat().toString());
sendAVCDecoderConfigurationRecord(0, dstVideoEncoder.getOutputFormat());
break;
default:
LogTools.d("VideoSenderThread,MediaCode,eobIndex=" + eobIndex);
if (startTime == 0) {
startTime = eInfo.presentationTimeUs / 1000;
}
/**
* SPS/PPS were already sent in INFO_OUTPUT_FORMAT_CHANGED,
* so MediaCodec.BUFFER_FLAG_CODEC_CONFIG buffers are ignored here
*/
if ((eInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0 && eInfo.size != 0) {
ByteBuffer realData = dstVideoEncoder.getOutputBuffers()[eobIndex];
realData.position(eInfo.offset + 4);
realData.limit(eInfo.offset + eInfo.size);
sendRealData((eInfo.presentationTimeUs / 1000) - startTime, realData);
}
dstVideoEncoder.releaseOutputBuffer(eobIndex, false);
break;
}
}
try {
sleep(5);
} catch (InterruptedException ignored) {
}
}
eInfo = null;
}
private void sendAVCDecoderConfigurationRecord(long tms, MediaFormat format) {
byte[] AVCDecoderConfigurationRecord = Packager.H264Packager.generateAVCDecoderConfigurationRecord(format);
int packetLen = Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH +
AVCDecoderConfigurationRecord.length;
byte[] finalBuff = new byte[packetLen];
Packager.FLVPackager.fillFlvVideoTag(finalBuff,
0,
true,
true,
AVCDecoderConfigurationRecord.length);
System.arraycopy(AVCDecoderConfigurationRecord, 0,
finalBuff, Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH, AVCDecoderConfigurationRecord.length);
RESFlvData resFlvData = new RESFlvData();
resFlvData.droppable = false;
resFlvData.byteBuffer = finalBuff;
resFlvData.size = finalBuff.length;
resFlvData.dts = (int) tms;
resFlvData.flvTagType = RESFlvData.FLV_RTMP_PACKET_TYPE_VIDEO;
resFlvData.videoFrameType = RESFlvData.NALU_TYPE_IDR;
dataCollecter.collect(resFlvData, RESRtmpSender.FROM_VIDEO);
}
private void sendRealData(long tms, ByteBuffer realData) {
int realDataLength = realData.remaining();
int packetLen = Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH +
Packager.FLVPackager.NALU_HEADER_LENGTH +
realDataLength;
byte[] finalBuff = new byte[packetLen];
realData.get(finalBuff, Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH +
Packager.FLVPackager.NALU_HEADER_LENGTH,
realDataLength);
int frameType = finalBuff[Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH +
Packager.FLVPackager.NALU_HEADER_LENGTH] & 0x1F;
Packager.FLVPackager.fillFlvVideoTag(finalBuff,
0,
false,
frameType == 5,
realDataLength);
RESFlvData resFlvData = new RESFlvData();
resFlvData.droppable = true;
resFlvData.byteBuffer = finalBuff;
resFlvData.size = finalBuff.length;
resFlvData.dts = (int) tms;
resFlvData.flvTagType = RESFlvData.FLV_RTMP_PACKET_TYPE_VIDEO;
resFlvData.videoFrameType = frameType;
dataCollecter.collect(resFlvData, RESRtmpSender.FROM_VIDEO);
}
}
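sendRealData() assumes each encoder output buffer begins with a 4-byte Annex-B start code (00 00 00 01), which is why it skips eInfo.offset + 4 before repackaging the NALU behind an FLV video tag header and a 4-byte length written by fillFlvVideoTag; the frame type is then read from the low 5 bits of the first NALU byte, with 5 marking an IDR keyframe. Two illustrative helpers (editor's sketch, not library methods) that make those assumptions explicit:

// Editor's sketch: the start-code layout assumed by realData.position(eInfo.offset + 4)
static boolean looksLikeAnnexB(ByteBuffer buf, int offset) {
    return buf.get(offset) == 0x00 && buf.get(offset + 1) == 0x00
            && buf.get(offset + 2) == 0x00 && buf.get(offset + 3) == 0x01;
}

// NALU type 5 (IDR slice) is what sendRealData treats as a keyframe
static boolean isKeyFrame(byte firstNaluByte) {
    return (firstNaluByte & 0x1F) == 5;
}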

View File

@@ -0,0 +1,27 @@
package me.lake.librestreaming.core.listener;
public interface RESConnectionListener {
void onOpenConnectionResult(int result);
void onWriteError(int errno);
void onCloseConnectionResult(int result);
class RESWriteErrorRunable implements Runnable {
RESConnectionListener connectionListener;
int errno;
public RESWriteErrorRunable(RESConnectionListener connectionListener, int errno) {
this.connectionListener = connectionListener;
this.errno = errno;
}
@Override
public void run() {
if (connectionListener != null) {
connectionListener.onWriteError(errno);
}
}
}
}

View File

@@ -0,0 +1,25 @@
package me.lake.librestreaming.core.listener;
import android.graphics.Bitmap;
public interface RESScreenShotListener {
void onScreenShotResult(Bitmap bitmap);
class RESScreenShotListenerRunable implements Runnable {
Bitmap resultBitmap;
RESScreenShotListener resScreenShotListener;
public RESScreenShotListenerRunable(RESScreenShotListener listener, Bitmap bitmap) {
resScreenShotListener = listener;
resultBitmap = bitmap;
}
@Override
public void run() {
if (resScreenShotListener != null) {
resScreenShotListener.onScreenShotResult(resultBitmap);
}
}
}
}

View File

@@ -0,0 +1,24 @@
package me.lake.librestreaming.core.listener;
public interface RESVideoChangeListener {
void onVideoSizeChanged(int width, int height);
class RESVideoChangeRunable implements Runnable {
RESVideoChangeListener videoChangeListener;
int w, h;
public RESVideoChangeRunable(RESVideoChangeListener videoChangeListener, int w, int h) {
this.videoChangeListener = videoChangeListener;
this.w = w;
this.h = h;
}
@Override
public void run() {
if (videoChangeListener != null) {
videoChangeListener.onVideoSizeChanged(w, h);
}
}
}
}
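All three listener interfaces follow the same pattern: the callback is wrapped in a Runnable so it can be posted to the main thread, the way checkScreenShot() posts a RESScreenShotListenerRunable through CallbackDelivery. A hedged one-liner showing the same delivery for a size change, with listener, w, and h assumed to be in scope:

CallbackDelivery.i().post(
        new RESVideoChangeListener.RESVideoChangeRunable(listener, w, h));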

View File

@@ -0,0 +1,186 @@
package me.lake.librestreaming.encoder;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.util.Log;
import java.io.IOException;
import java.nio.ByteBuffer;
public class MediaAudioEncoder extends MediaEncoder {
private static final boolean DEBUG = false; // TODO set false on release
private static final String TAG = "MediaAudioEncoder";
private static final String MIME_TYPE = "audio/mp4a-latm";
private static final int SAMPLE_RATE = 44100; // 44.1[kHz] is the only sample rate guaranteed to be available on all devices.
private static final int BIT_RATE = 64000;
public static final int SAMPLES_PER_FRAME = 1024; // AAC, bytes/frame/channel
public static final int FRAMES_PER_BUFFER = 25; // AAC, frame/buffer/sec
private AudioThread mAudioThread = null;
public MediaAudioEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
super(muxer, listener);
}
@Override
protected void prepare() throws IOException {
if (DEBUG) Log.v(TAG, "prepare:");
mTrackIndex = -1;
mMuxerStarted = mIsEOS = false;
// prepare MediaCodec for AAC encoding of audio data from the internal mic.
final MediaCodecInfo audioCodecInfo = selectAudioCodec(MIME_TYPE);
if (audioCodecInfo == null) {
Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
return;
}
if (DEBUG) Log.i(TAG, "selected codec: " + audioCodecInfo.getName());
final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 2);
audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_STEREO);
audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 2);
// audioFormat.setLong(MediaFormat.KEY_MAX_INPUT_SIZE, inputFile.length());
// audioFormat.setLong(MediaFormat.KEY_DURATION, (long)durationInMs );
if (DEBUG) Log.i(TAG, "format: " + audioFormat);
mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mMediaCodec.start();
if (DEBUG) Log.i(TAG, "prepare finishing");
if (mListener != null) {
try {
mListener.onPrepared(this);
} catch (final Exception e) {
Log.e(TAG, "prepare:", e);
}
}
}
@Override
protected void startRecording() {
super.startRecording();
// create and execute audio capturing thread using internal mic
if (mAudioThread == null) {
mAudioThread = new AudioThread();
mAudioThread.start();
}
}
@Override
protected void release() {
mAudioThread = null;
super.release();
}
private static final int[] AUDIO_SOURCES = new int[] {
MediaRecorder.AudioSource.MIC,
MediaRecorder.AudioSource.DEFAULT,
MediaRecorder.AudioSource.CAMCORDER,
MediaRecorder.AudioSource.VOICE_COMMUNICATION,
MediaRecorder.AudioSource.VOICE_RECOGNITION,
};
/**
* Thread to capture audio data from internal mic as uncompressed 16bit PCM data
* and write them to the MediaCodec encoder
*/
private class AudioThread extends Thread {
@Override
public void run() {
android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
try {
final int min_buffer_size = AudioRecord.getMinBufferSize(
SAMPLE_RATE, AudioFormat.CHANNEL_IN_STEREO,
AudioFormat.ENCODING_PCM_16BIT);
int buffer_size = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;
if (buffer_size < min_buffer_size)
buffer_size = ((min_buffer_size / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;
AudioRecord audioRecord = null;
for (final int source : AUDIO_SOURCES) {
try {
audioRecord = new AudioRecord(
source, SAMPLE_RATE,
AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT, buffer_size);
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED)
audioRecord = null;
} catch (final Exception e) {
audioRecord = null;
}
if (audioRecord != null) break;
}
if (audioRecord != null) {
try {
if (mIsCapturing) {
if (DEBUG) Log.v(TAG, "AudioThread:start audio recording");
final ByteBuffer buf = ByteBuffer.allocateDirect(SAMPLES_PER_FRAME);
int readBytes;
audioRecord.startRecording();
try {
for (; mIsCapturing && !mRequestStop && !mIsEOS ;) {
// read audio data from internal mic
buf.clear();
readBytes = audioRecord.read(buf, SAMPLES_PER_FRAME);
if (readBytes > 0) {
// set audio data to encoder
buf.position(readBytes);
buf.flip();
encode(buf, readBytes, getPTSUs());
frameAvailableSoon();
}
}
frameAvailableSoon();
} finally {
audioRecord.stop();
}
}
} finally {
audioRecord.release();
}
} else {
Log.e(TAG, "failed to initialize AudioRecord");
}
} catch (final Exception e) {
Log.e(TAG, "AudioThread#run", e);
}
if (DEBUG) Log.v(TAG, "AudioThread:finished");
}
}
/**
* select the first codec that matches a specific MIME type
* @param mimeType MIME type to search for
* @return the matching codec info, or null if none was found
*/
private static final MediaCodecInfo selectAudioCodec(final String mimeType) {
if (DEBUG) Log.v(TAG, "selectAudioCodec:");
MediaCodecInfo result = null;
// get the list of available codecs
final int numCodecs = MediaCodecList.getCodecCount();
LOOP: for (int i = 0; i < numCodecs; i++) {
final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) { // skip decoders
continue;
}
final String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (DEBUG) Log.i(TAG, "supportedType:" + codecInfo.getName() + ",MIME=" + types[j]);
if (types[j].equalsIgnoreCase(mimeType)) {
if (result == null) {
result = codecInfo;
break LOOP;
}
}
}
}
return result;
}
}
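The buffer sizing in AudioThread targets FRAMES_PER_BUFFER frames of SAMPLES_PER_FRAME bytes each and only falls back to a frame-aligned multiple of the platform minimum when that target is too small. A worked sketch of the rule (editor's example), assuming a hypothetical minimum of 3584 bytes from AudioRecord.getMinBufferSize for 44.1 kHz stereo 16-bit PCM:

int samplesPerFrame = 1024;                            // SAMPLES_PER_FRAME
int framesPerBuffer = 25;                              // FRAMES_PER_BUFFER
int bufferSize = samplesPerFrame * framesPerBuffer;    // 25600, normally well above the minimum
int minBufferSize = 3584;                              // hypothetical getMinBufferSize(...) result
if (bufferSize < minBufferSize) {
    // round up to a whole number of frames, then double it, exactly as AudioThread does
    bufferSize = ((minBufferSize / samplesPerFrame) + 1) * samplesPerFrame * 2;
}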

View File

@@ -0,0 +1,379 @@
package me.lake.librestreaming.encoder;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.util.Log;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
public abstract class MediaEncoder implements Runnable {
private static final boolean DEBUG = false; // TODO set false on release
private static final String TAG = "MediaEncoder";
protected static final int TIMEOUT_USEC = 10000; // 10[msec]
protected static final int MSG_FRAME_AVAILABLE = 1;
protected static final int MSG_STOP_RECORDING = 9;
public interface MediaEncoderListener {
public void onPrepared(MediaEncoder encoder);
public void onStopped(MediaEncoder encoder);
}
protected final Object mSync = new Object();
/**
* Flag that indicates this encoder is capturing now.
*/
protected volatile boolean mIsCapturing;
/**
* Counter of pending drain requests; nonzero means frame data will be available soon.
*/
private int mRequestDrain;
/**
* Flag to request stop capturing
*/
protected volatile boolean mRequestStop;
/**
* Flag that indicates the encoder received EOS (End Of Stream)
*/
protected boolean mIsEOS;
/**
* Flag that indicates the muxer is running
*/
protected boolean mMuxerStarted;
/**
* Track Number
*/
protected int mTrackIndex;
/**
* MediaCodec instance for encoding
*/
protected MediaCodec mMediaCodec; // API >= 16(Android4.1.2)
/**
* Weak reference to the MediaMuxerWrapper instance
*/
protected final WeakReference<MediaMuxerWrapper> mWeakMuxer;
/**
* BufferInfo instance for dequeuing
*/
private MediaCodec.BufferInfo mBufferInfo; // API >= 16(Android4.1.2)
protected final MediaEncoderListener mListener;
public MediaEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
if (listener == null) throw new NullPointerException("MediaEncoderListener is null");
if (muxer == null) throw new NullPointerException("MediaMuxerWrapper is null");
mWeakMuxer = new WeakReference<MediaMuxerWrapper>(muxer);
muxer.addEncoder(this);
mListener = listener;
synchronized (mSync) {
// create BufferInfo here for efficiency (to reduce GC)
mBufferInfo = new MediaCodec.BufferInfo();
// wait for starting thread
new Thread(this, getClass().getSimpleName()).start();
try {
mSync.wait();
} catch (final InterruptedException e) {
}
}
}
public String getOutputPath() {
final MediaMuxerWrapper muxer = mWeakMuxer.get();
return muxer != null ? muxer.getOutputPath() : null;
}
/**
* indicate that frame data is available soon or already available
* @return true if the encoder is ready to encode
*/
public boolean frameAvailableSoon() {
if (DEBUG) Log.v(TAG, "frameAvailableSoon");
synchronized (mSync) {
if (!mIsCapturing || mRequestStop) {
return false;
}
mRequestDrain++;
mSync.notifyAll();
}
return true;
}
/**
* encoding loop on private thread
*/
@Override
public void run() {
// android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
synchronized (mSync) {
mRequestStop = false;
mRequestDrain = 0;
mSync.notify();
}
final boolean isRunning = true;
boolean localRequestStop;
boolean localRequestDrain;
while (isRunning) {
synchronized (mSync) {
localRequestStop = mRequestStop;
localRequestDrain = (mRequestDrain > 0);
if (localRequestDrain)
mRequestDrain--;
}
if (localRequestStop) {
drain();
// request stop recording
signalEndOfInputStream();
// process output data again for the EOS signal
drain();
// release all related objects
release();
break;
}
if (localRequestDrain) {
drain();
} else {
synchronized (mSync) {
try {
mSync.wait();
} catch (final InterruptedException e) {
break;
}
}
}
} // end of while
if (DEBUG) Log.d(TAG, "Encoder thread exiting");
synchronized (mSync) {
mRequestStop = true;
mIsCapturing = false;
}
}
/*
* preparing method for each subclass
* this method should be implemented in the subclass, so it is declared abstract
* @throws IOException
*/
/*package*/ abstract void prepare() throws IOException;
/*package*/ void startRecording() {
if (DEBUG) Log.v(TAG, "startRecording");
synchronized (mSync) {
mIsCapturing = true;
mRequestStop = false;
mSync.notifyAll();
}
}
/**
* the method to request stop encoding
*/
/*package*/ void stopRecording() {
if (DEBUG) Log.v(TAG, "stopRecording");
synchronized (mSync) {
if (!mIsCapturing || mRequestStop) {
return;
}
mRequestStop = true; // for rejecting newer frame
mSync.notifyAll();
// We cannot know when the encoding and writing will finish,
// so return immediately after the request to avoid delaying the caller thread.
}
}
//********************************************************************************
//********************************************************************************
/**
* Release all related objects
*/
protected void release() {
if (DEBUG) Log.d(TAG, "release:");
try {
mListener.onStopped(this);
} catch (final Exception e) {
Log.e(TAG, "failed onStopped", e);
}
mIsCapturing = false;
if (mMediaCodec != null) {
try {
mMediaCodec.stop();
mMediaCodec.release();
mMediaCodec = null;
} catch (final Exception e) {
Log.e(TAG, "failed releasing MediaCodec", e);
}
}
if (mMuxerStarted) {
final MediaMuxerWrapper muxer = mWeakMuxer != null ? mWeakMuxer.get() : null;
if (muxer != null) {
try {
muxer.stop();
} catch (final Exception e) {
Log.e(TAG, "failed stopping muxer", e);
}
}
}
mBufferInfo = null;
}
protected void signalEndOfInputStream() {
if (DEBUG) Log.d(TAG, "sending EOS to encoder");
// signalEndOfInputStream is only available for video encoding with a surface
// and is equivalent to sending an empty buffer with the BUFFER_FLAG_END_OF_STREAM flag.
// mMediaCodec.signalEndOfInputStream(); // API >= 18
encode(null, 0, getPTSUs());
}
/**
* Method to set byte array to the MediaCodec encoder
* @param buffer
* @param length length of byte array, zero means EOS.
* @param presentationTimeUs
*/
protected void encode(final ByteBuffer buffer, final int length, final long presentationTimeUs) {
if (!mIsCapturing) return;
final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
while (mIsCapturing) {
final int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
Log.e(TAG, "inputBufferIndex: "+inputBufferIndex );
if (inputBufferIndex >= 0) {
final ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
if (buffer != null) {
inputBuffer.put(buffer);
}
if (DEBUG) Log.v(TAG, "encode:queueInputBuffer");
if (length <= 0) {
// send EOS
mIsEOS = true;
if (DEBUG) Log.i(TAG, "send BUFFER_FLAG_END_OF_STREAM");
mMediaCodec.queueInputBuffer(inputBufferIndex, 0, 0,
presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
break;
} else {
mMediaCodec.queueInputBuffer(inputBufferIndex, 0, length,
presentationTimeUs, 0);
}
break;
} else if (inputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
// wait until the MediaCodec encoder is ready to encode
// nothing to do here because MediaCodec#dequeueInputBuffer(TIMEOUT_USEC)
// will wait for maximum TIMEOUT_USEC(10msec) on each call
}
}
}
/**
* drain encoded data and write them to muxer
*/
protected void drain() {
if (mMediaCodec == null) return;
ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers();
Log.e(TAG, "encoderOutputBuffers: "+encoderOutputBuffers.length );
int encoderStatus, count = 0;
final MediaMuxerWrapper muxer = mWeakMuxer.get();
if (muxer == null) {
// throw new NullPointerException("muxer is unexpectedly null");
Log.w(TAG, "muxer is unexpectedly null");
return;
}
Log.e(TAG, "mIsCapturing: "+mIsCapturing );
LOOP: while (mIsCapturing) {
// get encoded data with maximum timeout duration of TIMEOUT_USEC(=10[msec])
encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
Log.e(TAG, "encoderStatus: "+encoderStatus );
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// wait 5 counts(=TIMEOUT_USEC x 5 = 50msec) until data/EOS come
if (!mIsEOS) {
if (++count > 5)
break LOOP; // out of while
}
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
if (DEBUG) Log.v(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
// this should not come when encoding
encoderOutputBuffers = mMediaCodec.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
if (DEBUG) Log.v(TAG, "INFO_OUTPUT_FORMAT_CHANGED");
// this status indicates the output format of the codec has changed
// it should come only once, before the actual encoded data,
// but it never comes on Android 4.3 or earlier;
// in that case, handle it when MediaCodec.BUFFER_FLAG_CODEC_CONFIG arrives.
if (mMuxerStarted) { // second time request is error
throw new RuntimeException("format changed twice");
}
// get the output format from the codec and pass it to the muxer
// getOutputFormat must be called after INFO_OUTPUT_FORMAT_CHANGED, otherwise it may crash.
final MediaFormat format = mMediaCodec.getOutputFormat(); // API >= 16
mTrackIndex = muxer.addTrack(format);
mMuxerStarted = true;
if (!muxer.start()) {
// we should wait until muxer is ready
synchronized (muxer) {
while (!muxer.isStarted())
try {
muxer.wait(100);
} catch (final InterruptedException e) {
break LOOP;
}
}
}
} else if (encoderStatus < 0) {
// unexpected status
if (DEBUG) Log.w(TAG, "drain:unexpected result from encoder#dequeueOutputBuffer: " + encoderStatus);
} else {
final ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
// this should never happen... possibly a MediaCodec internal error
throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// You should set the output format on the muxer here when targeting Android 4.3 or earlier,
// but MediaCodec#getOutputFormat cannot be called here (INFO_OUTPUT_FORMAT_CHANGED has not come yet),
// so the output format would have to be built from the buffer data.
// This sample is for API >= 18 (Android 4.3+), so the flag is simply ignored here.
if (DEBUG) Log.d(TAG, "drain:BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
// encoded data is ready, clear waiting counter
count = 0;
if (!mMuxerStarted) {
// muxer is not ready... this is a programming failure.
throw new RuntimeException("drain:muxer hasn't started");
}
// write encoded data to the muxer (presentationTimeUs needs to be adjusted)
mBufferInfo.presentationTimeUs = getPTSUs();
muxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
prevOutputPTSUs = mBufferInfo.presentationTimeUs;
}
// return buffer to encoder
mMediaCodec.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
// when EOS come.
mIsCapturing = false;
break; // out of while
}
}
}
}
/**
* previous presentationTimeUs for writing
*/
private long prevOutputPTSUs = 0;
/**
* get next encoding presentationTimeUs
* @return
*/
protected long getPTSUs() {
long result = System.nanoTime() / 1000L;
// presentationTimeUs should be monotonic
// otherwise the muxer fails to write
if (result < prevOutputPTSUs)
result = (prevOutputPTSUs - result) + result;
return result;
}
}
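The contract the base class expects from a subclass is small: prepare() creates and starts mMediaCodec, the producer hands data to encode() and then calls frameAvailableSoon() so the worker thread drains, and stopRecording() triggers a final drain, an EOS, and release(). A minimal hypothetical subclass sketch (PcmToneEncoder and push are illustrative names; imports match those already used by MediaAudioEncoder):

class PcmToneEncoder extends MediaEncoder {
    PcmToneEncoder(MediaMuxerWrapper muxer, MediaEncoderListener listener) {
        super(muxer, listener);
    }

    @Override
    protected void prepare() throws IOException {
        MediaFormat format = MediaFormat.createAudioFormat("audio/mp4a-latm", 44100, 1);
        format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 64000);
        mMediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
        mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mMediaCodec.start();
    }

    // producer-side entry point: feed one PCM buffer and wake the drain loop
    void push(ByteBuffer pcm, int length) {
        encode(pcm, length, getPTSUs());
        frameAvailableSoon();
    }
}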

View File

@@ -0,0 +1,232 @@
package me.lake.librestreaming.encoder;
import android.content.Context;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Environment;
import android.text.TextUtils;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.GregorianCalendar;
import java.util.Locale;
public class MediaMuxerWrapper {
private static final boolean DEBUG = false; // TODO set false on release
private static final String TAG = "MediaMuxerWrapper";
private static final String DIR_NAME = "WSLive";
private static final SimpleDateFormat mDateTimeFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss", Locale.US);
private String mOutputPath;
private final MediaMuxer mMediaMuxer; // API >= 18
private int mEncoderCount, mStatredCount;
private boolean mIsStarted;
private MediaEncoder mVideoEncoder, mAudioEncoder;
/**
* Constructor
* @param ext extension of output file
* @throws IOException
*/
public MediaMuxerWrapper(String ext) throws IOException {
if (TextUtils.isEmpty(ext)) ext = ".mp4";
try {
mOutputPath = getCaptureFile(Environment.DIRECTORY_MOVIES, ext).toString();
//mOutputPath =newTmpDir("Movies");/* getCaptureFile(Environment.DIRECTORY_MOVIES, ext).toString();*/
} catch (final NullPointerException e) {
throw new RuntimeException("This app has no permission of writing external storage");
}
mMediaMuxer = new MediaMuxer(mOutputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
mEncoderCount = mStatredCount = 0;
mIsStarted = false;
}
public static final String ROOT_DIR = "video";
private static final String DIR_TMP = "tmp";
private static Context mContext;
public static void setContext(Context context){
mContext = context;
}
/**
* Create a tmp directory, tmp/xxx/
*
* @param dirName name of the subdirectory
* @return absolute path of the generated .mp4 output file
*/
public static String newTmpDir(String dirName) {
File tmpDir = new File(getStorageRoot(mContext, ROOT_DIR, true), DIR_TMP);
if (!tmpDir.exists() || !tmpDir.isDirectory()) {
tmpDir.mkdirs();
}
File dir = new File(tmpDir, dirName);
if (!dir.exists() || !dir.isDirectory()) {
dir.mkdirs();
}
return dir.getAbsolutePath()+getDateTimeString() + ".mp4";
}
/**
* Get the cache root directory
*
* @param context
* @param isExternFirst whether to prefer external storage
* @return the cache root directory
*/
public static File getStorageRoot(Context context, String dirName, boolean isExternFirst) {
File cacheDir = null;
if ((Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState())
|| !Environment.isExternalStorageRemovable()) && isExternFirst) {
cacheDir = context.getExternalCacheDir();
} else {
cacheDir = context.getCacheDir();
}
File dir = new File(cacheDir, dirName);
if (!dir.exists() || !dir.isDirectory()) {
dir.mkdirs();
}
return dir;
}
public String getOutputPath() {
return mOutputPath;
}
public void prepare() throws IOException {
if (mVideoEncoder != null)
mVideoEncoder.prepare();
if (mAudioEncoder != null)
mAudioEncoder.prepare();
}
public void startRecording() {
if (mVideoEncoder != null)
mVideoEncoder.startRecording();
if (mAudioEncoder != null)
mAudioEncoder.startRecording();
}
public void stopRecording() {
if (mVideoEncoder != null)
mVideoEncoder.stopRecording();
mVideoEncoder = null;
if (mAudioEncoder != null)
mAudioEncoder.stopRecording();
mAudioEncoder = null;
}
public synchronized boolean isStarted() {
return mIsStarted;
}
//**********************************************************************
//**********************************************************************
/**
* assign an encoder to this class; this is called from the encoder.
* @param encoder instance of MediaVideoEncoder or MediaAudioEncoder
*/
/*package*/ void addEncoder(final MediaEncoder encoder) {
if (encoder instanceof MediaVideoEncoder) {
if (mVideoEncoder != null)
throw new IllegalArgumentException("Video encoder already added.");
mVideoEncoder = encoder;
} else if (encoder instanceof MediaAudioEncoder) {
if (mAudioEncoder != null)
throw new IllegalArgumentException("Video encoder already added.");
mAudioEncoder = encoder;
} else
throw new IllegalArgumentException("unsupported encoder");
mEncoderCount = (mVideoEncoder != null ? 1 : 0) + (mAudioEncoder != null ? 1 : 0);
}
/**
* request start recording from encoder
* @return true when muxer is ready to write
*/
/*package*/ synchronized boolean start() {
if (DEBUG) Log.v(TAG, "start:");
mStatredCount++;
if ((mEncoderCount > 0) && (mStatredCount == mEncoderCount)) {
mMediaMuxer.start();
mIsStarted = true;
notifyAll();
if (DEBUG) Log.v(TAG, "MediaMuxer started:");
}
return mIsStarted;
}
/**
* request stop recording from encoder when encoder received EOS
*/
/*package*/ synchronized void stop() {
if (DEBUG) Log.v(TAG, "stop:mStatredCount=" + mStatredCount);
mStatredCount--;
if ((mEncoderCount > 0) && (mStatredCount <= 0)) {
mMediaMuxer.stop();
mMediaMuxer.release();
mIsStarted = false;
if (DEBUG) Log.v(TAG, "MediaMuxer stopped:");
}
}
/**
* add a track with the given format to the muxer
* @param format output format reported by the encoder
* @return track index; a negative value indicates an error
*/
/*package*/ synchronized int addTrack(final MediaFormat format) {
if (mIsStarted)
throw new IllegalStateException("muxer already started");
final int trackIx = mMediaMuxer.addTrack(format);
if (DEBUG) Log.i(TAG, "addTrack:trackNum=" + mEncoderCount + ",trackIx=" + trackIx + ",format=" + format);
return trackIx;
}
/**
* write encoded data to muxer
* @param trackIndex
* @param byteBuf
* @param bufferInfo
*/
/*package*/ synchronized void writeSampleData(final int trackIndex, final ByteBuffer byteBuf, final MediaCodec.BufferInfo bufferInfo) {
if (mStatredCount > 0)
mMediaMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
}
//**********************************************************************
//**********************************************************************
/**
* generate output file
* @param type Environment.DIRECTORY_MOVIES / Environment.DIRECTORY_DCIM etc.
* @param ext .mp4(.m4a for audio) or .png
* @return null when this app has no permission to write to external storage.
*/
public static final File getCaptureFile(final String type, final String ext) {
final File dir = new File(Environment.getExternalStoragePublicDirectory(type), DIR_NAME);
Log.d(TAG, "path=" + dir.toString());
dir.mkdirs();
if (dir.canWrite()) {
return new File(dir, getDateTimeString() + ext);
}
return null;
}
/**
* get current date and time as String
* @return
*/
private static final String getDateTimeString() {
final GregorianCalendar now = new GregorianCalendar();
return mDateTimeFormat.format(now.getTime());
}
public String getFilePath(){
return mOutputPath;
}
}
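The wrapper does not own the encoders directly; each MediaVideoEncoder/MediaAudioEncoder registers itself through addEncoder() in its constructor, the muxer only starts once every registered encoder has called start(), and it stops after the last one calls stop(). A hedged setup sketch, with listener and the frame size assumed to be caller-supplied:

MediaMuxerWrapper muxer = new MediaMuxerWrapper(".mp4");    // may throw IOException
new MediaVideoEncoder(muxer, listener, 720, 1280);           // registers itself via addEncoder()
new MediaAudioEncoder(muxer, listener);                      // likewise
muxer.prepare();          // prepares both codecs
muxer.startRecording();   // starts both capture threads
// ... record ...
muxer.stopRecording();    // each encoder drains, signals EOS, and the muxer stops after the last track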

View File

@@ -0,0 +1,251 @@
package me.lake.librestreaming.encoder;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.opengl.EGLContext;
import android.opengl.Matrix;
import android.util.Log;
import android.view.Surface;
import java.io.IOException;
import me.lake.librestreaming.encoder.utils.RenderHandler;
public class MediaVideoEncoder extends MediaEncoder {
private static final boolean DEBUG = false; // TODO set false on release
private static final String TAG = "MediaVideoEncoder";
private static final String MIME_TYPE = "video/avc";
// parameters for recording
private static final int FRAME_RATE = 24;
private static final float BPP = 0.25f;
private final int mWidth;
private final int mHeight;
private RenderHandler mRenderHandler;
private Surface mSurface;
private int previewW, previewH; // preview width and height
private float[] mvpMatrix = new float[]{
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
};
private boolean isMatrixCalc = false;
public MediaVideoEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener, final int width, final int height) {
super(muxer, listener);
if (DEBUG) Log.i(TAG, "MediaVideoEncoder: ");
mWidth = width;
mHeight = height;
mRenderHandler = RenderHandler.createHandler(TAG);
}
public boolean frameAvailableSoon(final float[] tex_matrix) {
boolean result;
if (result = super.frameAvailableSoon())
mRenderHandler.draw(tex_matrix);
return result;
}
public boolean frameAvailableSoon(final float[] tex_matrix, final float[] mvp_matrix) {
boolean result;
if (result = super.frameAvailableSoon())
mRenderHandler.draw(tex_matrix, mvp_matrix);
return result;
}
@Override
public boolean frameAvailableSoon() {
boolean result;
if (result = super.frameAvailableSoon())
mRenderHandler.draw(null);
return result;
}
@Override
protected void prepare() throws IOException {
if (DEBUG) Log.i(TAG, "prepare: ");
mTrackIndex = -1;
mMuxerStarted = mIsEOS = false;
final MediaCodecInfo videoCodecInfo = selectVideoCodec(MIME_TYPE);
if (videoCodecInfo == null) {
Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
return;
}
if (DEBUG) Log.i(TAG, "selected codec: " + videoCodecInfo.getName());
final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); // API >= 18
format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 6);
if (DEBUG) Log.i(TAG, "format: " + format);
mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
// get Surface for encoder input
// this method only can call between #configure and #start
mSurface = mMediaCodec.createInputSurface(); // API >= 18
mMediaCodec.start();
if (DEBUG) Log.i(TAG, "prepare finishing");
if (mListener != null) {
try {
mListener.onPrepared(this);
} catch (final Exception e) {
Log.e(TAG, "prepare:", e);
}
}
}
public void setEglContext(final EGLContext shared_context, final int tex_id) {
mRenderHandler.setEglContext(shared_context, tex_id, mSurface, true);
}
@Override
protected void release() {
if (DEBUG) Log.i(TAG, "release:");
if (mSurface != null) {
mSurface.release();
mSurface = null;
}
if (mRenderHandler != null) {
mRenderHandler.release();
mRenderHandler = null;
}
super.release();
}
private int calcBitRate() {
final int bitrate = (int) (BPP * FRAME_RATE * mWidth * mHeight);
Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f));
return bitrate;
}
/**
* select the first codec that matches a specific MIME type
*
* @param mimeType
* @return null if no codec matched
*/
protected static final MediaCodecInfo selectVideoCodec(final String mimeType) {
if (DEBUG) Log.v(TAG, "selectVideoCodec:");
// get the list of available codecs
final int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) { // skip decoders
continue;
}
// select the first codec that matches a specific MIME type and color format
final String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
if (DEBUG) Log.i(TAG, "codec:" + codecInfo.getName() + ",MIME=" + types[j]);
final int format = selectColorFormat(codecInfo, mimeType);
if (format > 0) {
return codecInfo;
}
}
}
}
return null;
}
/**
* select a color format that is available on the specified codec and usable by this class.
*
* @return 0 if no colorFormat is matched
*/
protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
if (DEBUG) Log.i(TAG, "selectColorFormat: ");
int result = 0;
final MediaCodecInfo.CodecCapabilities caps;
try {
Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
caps = codecInfo.getCapabilitiesForType(mimeType);
} finally {
Thread.currentThread().setPriority(Thread.NORM_PRIORITY);
}
int colorFormat;
for (int i = 0; i < caps.colorFormats.length; i++) {
colorFormat = caps.colorFormats[i];
if (isRecognizedViewoFormat(colorFormat)) {
if (result == 0)
result = colorFormat;
break;
}
}
if (result == 0)
Log.e(TAG, "couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
return result;
}
/**
* color formats that we can use in this class
*/
protected static int[] recognizedFormats;
static {
recognizedFormats = new int[]{
// MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
// MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
// MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface,
};
}
private static final boolean isRecognizedViewoFormat(final int colorFormat) {
if (DEBUG) Log.i(TAG, "isRecognizedViewoFormat:colorFormat=" + colorFormat);
final int n = recognizedFormats != null ? recognizedFormats.length : 0;
for (int i = 0; i < n; i++) {
if (recognizedFormats[i] == colorFormat) {
return true;
}
}
return false;
}
@Override
protected void signalEndOfInputStream() {
if (DEBUG) Log.d(TAG, "sending EOS to encoder");
mMediaCodec.signalEndOfInputStream(); // API >= 18
mIsEOS = true;
}
public void setPreviewWH(int previewW, int previewH) {
this.previewW = previewW;
this.previewH = previewH;
}
public float[] getMvpMatrix() {
if (previewW < 1 || previewH < 1) return null;
if (isMatrixCalc) return mvpMatrix;
float encodeWHRatio = mWidth * 1.0f / mHeight;
float previewWHRatio = previewW * 1.0f / previewH;
float[] projection = new float[16];
float[] camera = new float[16];
if (encodeWHRatio > previewWHRatio) {
Matrix.orthoM(projection, 0, -1, 1, -previewWHRatio / encodeWHRatio, previewWHRatio / encodeWHRatio, 1, 3);
} else {
Matrix.orthoM(projection, 0, -encodeWHRatio / previewWHRatio, encodeWHRatio / previewWHRatio, -1, 1, 1, 3);
}
Matrix.setLookAtM(camera, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0);
Matrix.multiplyMM(mvpMatrix, 0, projection, 0, camera, 0);
isMatrixCalc = true;
return mvpMatrix;
}
}
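getMvpMatrix() keeps the aspect ratio by shrinking the orthographic bounds on the axis where the preview is relatively wider than the encode surface; the full-screen quad then overfills those bounds, so the excess is clipped, giving an aspect-correct center crop rather than a stretch. A worked example (editor's sketch) under assumed sizes of a 720x1280 encode surface and a 1080x1440 preview:

float[] projection = new float[16];
float encodeWHRatio  = 720f / 1280f;            // 0.5625
float previewWHRatio = 1080f / 1440f;           // 0.75
// encodeWHRatio < previewWHRatio, so the else-branch applies:
float bound = encodeWHRatio / previewWHRatio;   // 0.75
Matrix.orthoM(projection, 0, -bound, bound, -1, 1, 1, 3);  // horizontal bounds shrink, so the sides of the preview are cropped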

View File

@@ -0,0 +1,324 @@
package me.lake.librestreaming.encoder.utils;
import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.os.Build;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public class EGLBase { // API >= 17
private static final boolean DEBUG = false; // TODO set false on release
private static final String TAG = "EGLBase";
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
private EGLConfig mEglConfig = null;
private EGLContext mEglContext = EGL14.EGL_NO_CONTEXT;
private EGLDisplay mEglDisplay = EGL14.EGL_NO_DISPLAY;
private EGLContext mDefaultContext = EGL14.EGL_NO_CONTEXT;
public static class EglSurface {
private final EGLBase mEgl;
private EGLSurface mEglSurface = EGL14.EGL_NO_SURFACE;
private final int mWidth, mHeight;
EglSurface(final EGLBase egl, final Object surface) {
if (DEBUG) Log.v(TAG, "EglSurface:");
if (!(surface instanceof SurfaceView)
&& !(surface instanceof Surface)
&& !(surface instanceof SurfaceHolder)
&& !(surface instanceof SurfaceTexture))
throw new IllegalArgumentException("unsupported surface");
mEgl = egl;
mEglSurface = mEgl.createWindowSurface(surface);
mWidth = mEgl.querySurface(mEglSurface, EGL14.EGL_WIDTH);
mHeight = mEgl.querySurface(mEglSurface, EGL14.EGL_HEIGHT);
if (DEBUG) Log.v(TAG, String.format("EglSurface:size(%d,%d)", mWidth, mHeight));
}
EglSurface(final EGLBase egl, final int width, final int height) {
if (DEBUG) Log.v(TAG, "EglSurface:");
mEgl = egl;
mEglSurface = mEgl.createOffscreenSurface(width, height);
mWidth = width;
mHeight = height;
}
public void makeCurrent() {
mEgl.makeCurrent(mEglSurface);
}
public void swap() {
mEgl.swap(mEglSurface);
}
public EGLContext getContext() {
return mEgl.getContext();
}
public void release() {
if (DEBUG) Log.v(TAG, "EglSurface:release:");
mEgl.makeDefault();
mEgl.destroyWindowSurface(mEglSurface);
mEglSurface = EGL14.EGL_NO_SURFACE;
}
public int getWidth() {
return mWidth;
}
public int getHeight() {
return mHeight;
}
}
public EGLBase(final EGLContext shared_context, final boolean with_depth_buffer, final boolean isRecordable) {
if (DEBUG) Log.v(TAG, "EGLBase:");
init(shared_context, with_depth_buffer, isRecordable);
}
public void release() {
if (DEBUG) Log.v(TAG, "release:");
if (mEglDisplay != EGL14.EGL_NO_DISPLAY) {
destroyContext();
EGL14.eglTerminate(mEglDisplay);
EGL14.eglReleaseThread();
}
mEglDisplay = EGL14.EGL_NO_DISPLAY;
mEglContext = EGL14.EGL_NO_CONTEXT;
}
public EglSurface createFromSurface(final Object surface) {
if (DEBUG) Log.v(TAG, "createFromSurface:");
final EglSurface eglSurface = new EglSurface(this, surface);
eglSurface.makeCurrent();
return eglSurface;
}
public EglSurface createOffscreen(final int width, final int height) {
if (DEBUG) Log.v(TAG, "createOffscreen:");
final EglSurface eglSurface = new EglSurface(this, width, height);
eglSurface.makeCurrent();
return eglSurface;
}
public EGLContext getContext() {
return mEglContext;
}
public int querySurface(final EGLSurface eglSurface, final int what) {
final int[] value = new int[1];
EGL14.eglQuerySurface(mEglDisplay, eglSurface, what, value, 0);
return value[0];
}
private void init(EGLContext shared_context, final boolean with_depth_buffer, final boolean isRecordable) {
if (DEBUG) Log.v(TAG, "init:");
if (mEglDisplay != EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("EGL already set up");
}
mEglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (mEglDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("eglGetDisplay failed");
}
final int[] version = new int[2];
if (!EGL14.eglInitialize(mEglDisplay, version, 0, version, 1)) {
mEglDisplay = null;
throw new RuntimeException("eglInitialize failed");
}
shared_context = shared_context != null ? shared_context : EGL14.EGL_NO_CONTEXT;
if (mEglContext == EGL14.EGL_NO_CONTEXT) {
mEglConfig = getConfig(with_depth_buffer, isRecordable);
if (mEglConfig == null) {
throw new RuntimeException("chooseConfig failed");
}
// create EGL rendering context
mEglContext = createContext(shared_context);
}
// confirm whether the EGL rendering context is successfully created
final int[] values = new int[1];
EGL14.eglQueryContext(mEglDisplay, mEglContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, values, 0);
if (DEBUG) Log.d(TAG, "EGLContext created, client version " + values[0]);
makeDefault(); // makeCurrent(EGL14.EGL_NO_SURFACE);
}
/**
* change context to draw this window surface
* @return
*/
private boolean makeCurrent(final EGLSurface surface) {
// if (DEBUG) Log.v(TAG, "makeCurrent:");
if (mEglDisplay == null) {
if (DEBUG) Log.d(TAG, "makeCurrent:eglDisplay not initialized");
}
if (surface == null || surface == EGL14.EGL_NO_SURFACE) {
final int error = EGL14.eglGetError();
if (error == EGL14.EGL_BAD_NATIVE_WINDOW) {
Log.e(TAG, "makeCurrent:returned EGL_BAD_NATIVE_WINDOW.");
}
return false;
}
// attach EGL rendering context to a specific EGL window surface
if (!EGL14.eglMakeCurrent(mEglDisplay, surface, surface, mEglContext)) {
Log.w(TAG, "eglMakeCurrent:" + EGL14.eglGetError());
return false;
}
return true;
}
private void makeDefault() {
if (DEBUG) Log.v(TAG, "makeDefault:");
if (!EGL14.eglMakeCurrent(mEglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
Log.w("TAG", "makeDefault" + EGL14.eglGetError());
}
}
private int swap(final EGLSurface surface) {
// if (DEBUG) Log.v(TAG, "swap:");
if (!EGL14.eglSwapBuffers(mEglDisplay, surface)) {
final int err = EGL14.eglGetError();
if (DEBUG) Log.w(TAG, "swap:err=" + err);
return err;
}
return EGL14.EGL_SUCCESS;
}
private EGLContext createContext(final EGLContext shared_context) {
// if (DEBUG) Log.v(TAG, "createContext:");
final int[] attrib_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
final EGLContext context = EGL14.eglCreateContext(mEglDisplay, mEglConfig, shared_context, attrib_list, 0);
checkEglError("eglCreateContext");
return context;
}
private void destroyContext() {
if (DEBUG) Log.v(TAG, "destroyContext:");
if (!EGL14.eglDestroyContext(mEglDisplay, mEglContext)) {
Log.e("destroyContext", "display:" + mEglDisplay + " context: " + mEglContext);
Log.e(TAG, "eglDestroyContex:" + EGL14.eglGetError());
}
mEglContext = EGL14.EGL_NO_CONTEXT;
if (mDefaultContext != EGL14.EGL_NO_CONTEXT) {
if (!EGL14.eglDestroyContext(mEglDisplay, mDefaultContext)) {
Log.e("destroyContext", "display:" + mEglDisplay + " context: " + mDefaultContext);
Log.e(TAG, "eglDestroyContex:" + EGL14.eglGetError());
}
mDefaultContext = EGL14.EGL_NO_CONTEXT;
}
}
private EGLSurface createWindowSurface(final Object nativeWindow) {
if (DEBUG) Log.v(TAG, "createWindowSurface:nativeWindow=" + nativeWindow);
final int[] surfaceAttribs = {
EGL14.EGL_NONE
};
EGLSurface result = null;
try {
result = EGL14.eglCreateWindowSurface(mEglDisplay, mEglConfig, nativeWindow, surfaceAttribs, 0);
} catch (final IllegalArgumentException e) {
Log.e(TAG, "eglCreateWindowSurface", e);
}
return result;
}
/**
* Creates an EGL surface associated with an offscreen buffer.
*/
private EGLSurface createOffscreenSurface(final int width, final int height) {
if (DEBUG) Log.v(TAG, "createOffscreenSurface:");
final int[] surfaceAttribs = {
EGL14.EGL_WIDTH, width,
EGL14.EGL_HEIGHT, height,
EGL14.EGL_NONE
};
EGLSurface result = null;
try {
result = EGL14.eglCreatePbufferSurface(mEglDisplay, mEglConfig, surfaceAttribs, 0);
checkEglError("eglCreatePbufferSurface");
if (result == null) {
throw new RuntimeException("surface was null");
}
} catch (final IllegalArgumentException e) {
Log.e(TAG, "createOffscreenSurface", e);
} catch (final RuntimeException e) {
Log.e(TAG, "createOffscreenSurface", e);
}
return result;
}
private void destroyWindowSurface(EGLSurface surface) {
if (DEBUG) Log.v(TAG, "destroySurface:");
if (surface != EGL14.EGL_NO_SURFACE) {
EGL14.eglMakeCurrent(mEglDisplay,
EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
EGL14.eglDestroySurface(mEglDisplay, surface);
}
surface = EGL14.EGL_NO_SURFACE;
if (DEBUG) Log.v(TAG, "destroySurface:finished");
}
private void checkEglError(final String msg) {
int error;
if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
}
}
@SuppressWarnings("unused")
private EGLConfig getConfig(final boolean with_depth_buffer, final boolean isRecordable) {
final int[] attribList = {
EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL14.EGL_RED_SIZE, 8,
EGL14.EGL_GREEN_SIZE, 8,
EGL14.EGL_BLUE_SIZE, 8,
EGL14.EGL_ALPHA_SIZE, 8,
EGL14.EGL_NONE, EGL14.EGL_NONE, //EGL14.EGL_STENCIL_SIZE, 8,
EGL14.EGL_NONE, EGL14.EGL_NONE, //EGL_RECORDABLE_ANDROID, 1, // this flag is needed when recording via MediaCodec
EGL14.EGL_NONE, EGL14.EGL_NONE, // with_depth_buffer ? EGL14.EGL_DEPTH_SIZE : EGL14.EGL_NONE,
// with_depth_buffer ? 16 : 0,
EGL14.EGL_NONE
};
int offset = 10;
if (false) { // stencil buffer (never used)
attribList[offset++] = EGL14.EGL_STENCIL_SIZE;
attribList[offset++] = 8;
}
if (with_depth_buffer) { // depth buffer
attribList[offset++] = EGL14.EGL_DEPTH_SIZE;
attribList[offset++] = 16;
}
if (isRecordable && (Build.VERSION.SDK_INT >= 18)) {// when the surface is used as MediaCodec input
attribList[offset++] = EGL_RECORDABLE_ANDROID;
attribList[offset++] = 1;
}
for (int i = attribList.length - 1; i >= offset; i--) {
attribList[i] = EGL14.EGL_NONE;
}
final EGLConfig[] configs = new EGLConfig[1];
final int[] numConfigs = new int[1];
if (!EGL14.eglChooseConfig(mEglDisplay, attribList, 0, configs, 0, configs.length, numConfigs, 0)) {
// XXX it would be better to fall back to RGB565
Log.w(TAG, "unable to find RGBA8888 EGLConfig");
return null;
}
return configs[0];
}
}

View File

@@ -0,0 +1,189 @@
package me.lake.librestreaming.encoder.utils;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Log;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
/**
* Helper class that draws a texture to the whole view using a given texture matrix
*/
public class GLDrawer2D {
private static final boolean DEBUG = false; // TODO set false on release
private static final String TAG = "GLDrawer2D";
private static final String vss
= "uniform mat4 uMVPMatrix;\n"
+ "uniform mat4 uTexMatrix;\n"
+ "attribute highp vec4 aPosition;\n"
+ "attribute highp vec4 aTextureCoord;\n"
+ "varying highp vec2 vTextureCoord;\n"
+ "\n"
+ "void main() {\n"
+ " gl_Position = uMVPMatrix * aPosition;\n"
+ " vTextureCoord = (uTexMatrix * aTextureCoord).xy;\n"
+ "}\n";
private static final String fss
= "precision mediump float;\n"
+ "uniform sampler2D sTexture;\n"
+ "varying highp vec2 vTextureCoord;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
+ "}";
private static final float[] VERTICES = { 1.0f, 1.0f, -1.0f, 1.0f, 1.0f, -1.0f, -1.0f, -1.0f };
//private static final float[] TEXCOORD = { 1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 0.0f };
private static final float[] TEXCOORD = { 1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f };
private final FloatBuffer pVertex;
private final FloatBuffer pTexCoord;
private int hProgram;
int maPositionLoc;
int maTextureCoordLoc;
int muMVPMatrixLoc;
int muTexMatrixLoc;
private final float[] mMvpMatrix = new float[16];
private static final int FLOAT_SZ = Float.SIZE / 8;
private static final int VERTEX_NUM = 4;
private static final int VERTEX_SZ = VERTEX_NUM * 2;
/**
* Constructor
* this should be called in GL context
*/
public GLDrawer2D() {
pVertex = ByteBuffer.allocateDirect(VERTEX_SZ * FLOAT_SZ)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
pVertex.put(VERTICES);
pVertex.flip();
pTexCoord = ByteBuffer.allocateDirect(VERTEX_SZ * FLOAT_SZ)
.order(ByteOrder.nativeOrder()).asFloatBuffer();
pTexCoord.put(TEXCOORD);
pTexCoord.flip();
hProgram = loadShader(vss, fss);
GLES20.glUseProgram(hProgram);
maPositionLoc = GLES20.glGetAttribLocation(hProgram, "aPosition");
maTextureCoordLoc = GLES20.glGetAttribLocation(hProgram, "aTextureCoord");
muMVPMatrixLoc = GLES20.glGetUniformLocation(hProgram, "uMVPMatrix");
muTexMatrixLoc = GLES20.glGetUniformLocation(hProgram, "uTexMatrix");
Matrix.setIdentityM(mMvpMatrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mMvpMatrix, 0);
GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, mMvpMatrix, 0);
GLES20.glVertexAttribPointer(maPositionLoc, 2, GLES20.GL_FLOAT, false, VERTEX_SZ, pVertex);
GLES20.glVertexAttribPointer(maTextureCoordLoc, 2, GLES20.GL_FLOAT, false, VERTEX_SZ, pTexCoord);
GLES20.glEnableVertexAttribArray(maPositionLoc);
GLES20.glEnableVertexAttribArray(maTextureCoordLoc);
}
/**
* terminating; this should be called in a GL context
*/
public void release() {
if (hProgram >= 0)
GLES20.glDeleteProgram(hProgram);
hProgram = -1;
}
/**
* draw specific texture with specific texture matrix
* @param tex_id texture ID
* @param tex_matrix texture matrix; if null, the last matrix set is reused (the array size is not checked and must hold at least 16 floats)
*/
public void draw(final int tex_id, final float[] tex_matrix) {
GLES20.glUseProgram(hProgram);
if (tex_matrix != null)
GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, tex_matrix, 0);
GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mMvpMatrix, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex_id);
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, VERTEX_NUM);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glUseProgram(0);
}
/**
* Set the model/view/projection transform matrix
* @param matrix source array holding at least 16 floats starting at offset; null resets to identity
* @param offset offset into the matrix array at which the 16 values start
*/
public void setMatrix(final float[] matrix, final int offset) {
if ((matrix != null) && (matrix.length >= offset + 16)) {
System.arraycopy(matrix, offset, mMvpMatrix, 0, 16);
} else {
Matrix.setIdentityM(mMvpMatrix, 0);
}
}
/**
* create external texture
* @return texture ID
*/
public static int initTex() {
if (DEBUG) Log.v(TAG, "initTex:");
final int[] tex = new int[1];
GLES20.glGenTextures(1, tex, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
return tex[0];
}
/**
* delete specific texture
*/
public static void deleteTex(final int hTex) {
if (DEBUG) Log.v(TAG, "deleteTex:");
final int[] tex = new int[] {hTex};
GLES20.glDeleteTextures(1, tex, 0);
}
/**
* load, compile and link shader
* @param vss source of vertex shader
* @param fss source of fragment shader
* @return handle of the linked program
*/
public static int loadShader(final String vss, final String fss) {
if (DEBUG) Log.v(TAG, "loadShader:");
int vs = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glShaderSource(vs, vss);
GLES20.glCompileShader(vs);
final int[] compiled = new int[1];
GLES20.glGetShaderiv(vs, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
if (DEBUG) Log.e(TAG, "Failed to compile vertex shader:"
+ GLES20.glGetShaderInfoLog(vs));
GLES20.glDeleteShader(vs);
vs = 0;
}
int fs = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(fs, fss);
GLES20.glCompileShader(fs);
GLES20.glGetShaderiv(fs, GLES20.GL_COMPILE_STATUS, compiled, 0);
if (compiled[0] == 0) {
if (DEBUG) Log.w(TAG, "Failed to compile fragment shader:"
+ GLES20.glGetShaderInfoLog(fs));
GLES20.glDeleteShader(fs);
fs = 0;
}
final int program = GLES20.glCreateProgram();
GLES20.glAttachShader(program, vs);
GLES20.glAttachShader(program, fs);
GLES20.glLinkProgram(program);
return program;
}
}
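A minimal usage sketch for GLDrawer2D (not part of the library source): it assumes an EGL context is already current on the calling thread and that textureId refers to an existing GL_TEXTURE_2D texture; the helper name is illustrative.
// Sketch only: must run on a thread with a current EGL context; textureId is assumed to exist.
static void drawTextureOnce(int textureId) {
    GLDrawer2D drawer = new GLDrawer2D();      // compiles and links the shader program
    float[] texMatrix = new float[16];
    android.opengl.Matrix.setIdentityM(texMatrix, 0);
    drawer.draw(textureId, texMatrix);         // fills the current surface with the texture
    drawer.release();
}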

View File

@@ -0,0 +1,211 @@
package me.lake.librestreaming.encoder.utils;
import android.graphics.SurfaceTexture;
import android.opengl.EGLContext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.text.TextUtils;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
/**
* Helper class that draws a texture to the whole view on a private thread
*/
public final class RenderHandler implements Runnable {
private static final boolean DEBUG = false; // TODO set false on release
private static final String TAG = "RenderHandler";
private final Object mSync = new Object();
private EGLContext mShard_context;
private boolean mIsRecordable;
private Object mSurface;
private int mTexId = -1;
private float[] mMatrix = new float[32];
private boolean mRequestSetEglContext;
private boolean mRequestRelease;
private int mRequestDraw;
public static final RenderHandler createHandler(final String name) {
if (DEBUG) Log.v(TAG, "createHandler:");
final RenderHandler handler = new RenderHandler();
synchronized (handler.mSync) {
new Thread(handler, !TextUtils.isEmpty(name) ? name : TAG).start();
try {
handler.mSync.wait();
} catch (final InterruptedException e) {
}
}
return handler;
}
public final void setEglContext(final EGLContext shared_context, final int tex_id, final Object surface, final boolean isRecordable) {
if (DEBUG) Log.i(TAG, "setEglContext:");
if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture) && !(surface instanceof SurfaceHolder))
throw new RuntimeException("unsupported window type:" + surface);
synchronized (mSync) {
if (mRequestRelease) return;
mShard_context = shared_context;
mTexId = tex_id;
mSurface = surface;
mIsRecordable = isRecordable;
mRequestSetEglContext = true;
Matrix.setIdentityM(mMatrix, 0);
Matrix.setIdentityM(mMatrix, 16);
mSync.notifyAll();
try {
mSync.wait();
} catch (final InterruptedException e) {
}
}
}
public final void draw() {
draw(mTexId, mMatrix, null);
}
public final void draw(final int tex_id) {
draw(tex_id, mMatrix, null);
}
public final void draw(final float[] tex_matrix) {
draw(mTexId, tex_matrix, null);
}
public final void draw(final float[] tex_matrix, final float[] mvp_matrix) {
draw(mTexId, tex_matrix, mvp_matrix);
}
public final void draw(final int tex_id, final float[] tex_matrix) {
draw(tex_id, tex_matrix, null);
}
public final void draw(final int tex_id, final float[] tex_matrix, final float[] mvp_matrix) {
synchronized (mSync) {
if (mRequestRelease) return;
mTexId = tex_id;
if ((tex_matrix != null) && (tex_matrix.length >= 16)) {
System.arraycopy(tex_matrix, 0, mMatrix, 0, 16);
} else {
Matrix.setIdentityM(mMatrix, 0);
}
if ((mvp_matrix != null) && (mvp_matrix.length >= 16)) {
System.arraycopy(mvp_matrix, 0, mMatrix, 16, 16);
} else {
Matrix.setIdentityM(mMatrix, 16);
}
mRequestDraw++;
mSync.notifyAll();
/* try {
mSync.wait();
} catch (final InterruptedException e) {
} */
}
}
public boolean isValid() {
synchronized (mSync) {
return !(mSurface instanceof Surface) || ((Surface)mSurface).isValid();
}
}
public final void release() {
if (DEBUG) Log.i(TAG, "release:");
synchronized (mSync) {
if (mRequestRelease) return;
mRequestRelease = true;
mSync.notifyAll();
try {
mSync.wait();
} catch (final InterruptedException e) {
}
}
}
//********************************************************************************
//********************************************************************************
private EGLBase mEgl;
private EGLBase.EglSurface mInputSurface;
private GLDrawer2D mDrawer;
@Override
public final void run() {
if (DEBUG) Log.i(TAG, "RenderHandler thread started:");
synchronized (mSync) {
mRequestSetEglContext = mRequestRelease = false;
mRequestDraw = 0;
mSync.notifyAll();
}
boolean localRequestDraw;
for (;;) {
synchronized (mSync) {
if (mRequestRelease) break;
if (mRequestSetEglContext) {
mRequestSetEglContext = false;
internalPrepare();
}
localRequestDraw = mRequestDraw > 0;
if (localRequestDraw) {
mRequestDraw--;
// mSync.notifyAll();
}
}
if (localRequestDraw) {
if ((mEgl != null) && mTexId >= 0) {
mInputSurface.makeCurrent();
// clear screen with yellow color so that you can see rendering rectangle
GLES20.glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
mDrawer.setMatrix(mMatrix, 16);
mDrawer.draw(mTexId, mMatrix);
mInputSurface.swap();
}
} else {
synchronized(mSync) {
try {
mSync.wait();
} catch (final InterruptedException e) {
break;
}
}
}
}
synchronized (mSync) {
mRequestRelease = true;
internalRelease();
mSync.notifyAll();
}
if (DEBUG) Log.i(TAG, "RenderHandler thread finished:");
}
private final void internalPrepare() {
if (DEBUG) Log.i(TAG, "internalPrepare:");
internalRelease();
mEgl = new EGLBase(mShard_context, false, mIsRecordable);
mInputSurface = mEgl.createFromSurface(mSurface);
mInputSurface.makeCurrent();
mDrawer = new GLDrawer2D();
mSurface = null;
mSync.notifyAll();
}
private final void internalRelease() {
if (DEBUG) Log.i(TAG, "internalRelease:");
if (mInputSurface != null) {
mInputSurface.release();
mInputSurface = null;
}
if (mDrawer != null) {
mDrawer.release();
mDrawer = null;
}
if (mEgl != null) {
mEgl.release();
mEgl = null;
}
}
}
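A hedged setup sketch for RenderHandler, assuming the caller already owns a GL context and an encoder input Surface (for example from MediaCodec.createInputSurface()); the helper name and parameters are illustrative.
// Sketch only: sharedContext typically comes from EGL14.eglGetCurrentContext().
static RenderHandler startEncoderRendering(android.opengl.EGLContext sharedContext,
                                           int texId,
                                           android.view.Surface encoderSurface) {
    RenderHandler handler = RenderHandler.createHandler("encoder-render");
    handler.setEglContext(sharedContext, texId, encoderSurface, true /* isRecordable */);
    // per frame, after SurfaceTexture.updateTexImage(): handler.draw(texMatrix);
    // on teardown: handler.release();
    return handler;
}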

View File

@@ -0,0 +1,31 @@
package me.lake.librestreaming.filter.hardvideofilter;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import me.lake.librestreaming.core.GLHelper;
public class BaseHardVideoFilter {
protected int SIZE_WIDTH;
protected int SIZE_HEIGHT;
protected int directionFlag=-1;
protected ShortBuffer drawIndecesBuffer;
public void onInit(int VWidth, int VHeight) {
SIZE_WIDTH = VWidth;
SIZE_HEIGHT = VHeight;
drawIndecesBuffer = GLHelper.getDrawIndecesBuffer();
}
public void onDraw(final int cameraTexture,final int targetFrameBuffer, final FloatBuffer shapeBuffer, final FloatBuffer textrueBuffer) {
}
public void onDestroy() {
}
public void onDirectionUpdate(int _directionFlag) {
this.directionFlag = _directionFlag;
}
}

View File

@@ -0,0 +1,92 @@
package me.lake.librestreaming.filter.hardvideofilter;
import android.opengl.GLES20;
import java.nio.FloatBuffer;
import java.util.LinkedList;
import java.util.List;
import me.lake.librestreaming.tools.GLESTools;
public class HardVideoGroupFilter extends BaseHardVideoFilter {
private LinkedList<FilterWrapper> filterWrappers;
public HardVideoGroupFilter(List<BaseHardVideoFilter> filters) {
if (filters == null || filters.isEmpty()) {
throw new IllegalArgumentException("can not create empty GroupFilter");
}
filterWrappers = new LinkedList<FilterWrapper>();
for (BaseHardVideoFilter filter : filters) {
filterWrappers.add(new FilterWrapper(filter));
}
}
@Override
public void onInit(int VWidth, int VHeight) {
super.onInit(VWidth, VHeight);
int i = 0;
for (FilterWrapper wrapper : filterWrappers) {
wrapper.filter.onInit(VWidth, VHeight);
int[] frameBuffer = new int[1];
int[] frameBufferTexture = new int[1];
GLESTools.createFrameBuff(frameBuffer,
frameBufferTexture,
SIZE_WIDTH,
SIZE_HEIGHT);
wrapper.frameBuffer = frameBuffer[0];
wrapper.frameBufferTexture = frameBufferTexture[0];
i++;
}
}
@Override
public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textrueBuffer) {
FilterWrapper preFilterWrapper = null;
int i = 0;
int texture;
for (FilterWrapper wrapper : filterWrappers) {
if (preFilterWrapper == null) {
texture = cameraTexture;
} else {
texture = preFilterWrapper.frameBufferTexture;
}
if (i == (filterWrappers.size() - 1)) {
wrapper.filter.onDraw(texture, targetFrameBuffer, shapeBuffer, textrueBuffer);
} else {
wrapper.filter.onDraw(texture, wrapper.frameBuffer, shapeBuffer, textrueBuffer);
}
preFilterWrapper = wrapper;
i++;
}
}
@Override
public void onDestroy() {
super.onDestroy();
for (FilterWrapper wrapper : filterWrappers) {
wrapper.filter.onDestroy();
GLES20.glDeleteFramebuffers(1, new int[]{wrapper.frameBuffer}, 0);
GLES20.glDeleteTextures(1, new int[]{wrapper.frameBufferTexture}, 0);
}
}
@Override
public void onDirectionUpdate(int _directionFlag) {
super.onDirectionUpdate(_directionFlag);
for (FilterWrapper wrapper : filterWrappers) {
wrapper.filter.onDirectionUpdate(_directionFlag);
}
}
private class FilterWrapper {
BaseHardVideoFilter filter;
int frameBuffer;
int frameBufferTexture;
FilterWrapper(BaseHardVideoFilter filter) {
this.filter = filter;
}
}
}
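An illustrative sketch of chaining two hard filters into one group (assumes java.util imports and a caller-supplied second-stage filter; how the group is handed to the streaming client is not shown here).
// Sketch only: builds a two-stage filter chain; customFilter is supplied by the caller.
static BaseHardVideoFilter buildGroupFilter(BaseHardVideoFilter customFilter) {
    List<BaseHardVideoFilter> filters = new LinkedList<>();
    filters.add(new OriginalHardVideoFilter(null, null)); // pass-through first stage
    filters.add(customFilter);                            // custom second stage
    return new HardVideoGroupFilter(filters);
}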

View File

@@ -0,0 +1,97 @@
package me.lake.librestreaming.filter.hardvideofilter;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import java.nio.FloatBuffer;
import me.lake.librestreaming.tools.GLESTools;
public class OriginalHardVideoFilter extends BaseHardVideoFilter {
protected int glProgram;
protected int glTextureLoc;
protected int glCamPostionLoc;
protected int glCamTextureCoordLoc;
protected String vertexShader_filter = "" +
"attribute vec4 aCamPosition;\n" +
"attribute vec2 aCamTextureCoord;\n" +
"varying vec2 vCamTextureCoord;\n" +
"void main(){\n" +
" gl_Position= aCamPosition;\n" +
" vCamTextureCoord = aCamTextureCoord;\n" +
"}";
protected String fragmentshader_filter = "" +
"precision highp float;\n" +
"varying highp vec2 vCamTextureCoord;\n" +
"uniform sampler2D uCamTexture;\n" +
"void main(){\n" +
" vec4 color = texture2D(uCamTexture, vCamTextureCoord);\n" +
" gl_FragColor = color;\n" +
"}";
public OriginalHardVideoFilter(String vertexShaderCode, String fragmentShaderCode) {
if (vertexShaderCode != null) {
vertexShader_filter = vertexShaderCode;
}
if (fragmentShaderCode != null) {
fragmentshader_filter = fragmentShaderCode;
}
}
@Override
public void onInit(int VWidth, int VHeight) {
super.onInit(VWidth, VHeight);
glProgram = GLESTools.createProgram(vertexShader_filter, fragmentshader_filter);
GLES20.glUseProgram(glProgram);
glTextureLoc = GLES20.glGetUniformLocation(glProgram, "uCamTexture");
glCamPostionLoc = GLES20.glGetAttribLocation(glProgram, "aCamPosition");
glCamTextureCoordLoc = GLES20.glGetAttribLocation(glProgram, "aCamTextureCoord");
}
@Override
public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textrueBuffer) {
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer);
GLES20.glUseProgram(glProgram);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, cameraTexture);
GLES20.glUniform1i(glTextureLoc, 0);
GLES20.glEnableVertexAttribArray(glCamPostionLoc);
GLES20.glEnableVertexAttribArray(glCamTextureCoordLoc);
shapeBuffer.position(0);
GLES20.glVertexAttribPointer(glCamPostionLoc, 2,
GLES20.GL_FLOAT, false,
2 * 4, shapeBuffer);
textrueBuffer.position(0);
GLES20.glVertexAttribPointer(glCamTextureCoordLoc, 2,
GLES20.GL_FLOAT, false,
2 * 4, textrueBuffer);
onPreDraw();
GLES20.glViewport(0, 0, SIZE_WIDTH, SIZE_HEIGHT);
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer);
GLES20.glFinish();
onAfterDraw();
GLES20.glDisableVertexAttribArray(glCamPostionLoc);
GLES20.glDisableVertexAttribArray(glCamTextureCoordLoc);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
GLES20.glUseProgram(0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
protected void onPreDraw() {
}
protected void onAfterDraw() {
}
@Override
public void onDestroy() {
super.onDestroy();
GLES20.glDeleteProgram(glProgram);
}
}
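As a sketch of how a custom hard filter might look, the hypothetical subclass below swaps in a grayscale fragment shader while reusing the default vertex shader and the attribute/uniform names defined above.
// Hypothetical example, not part of the library.
public class GrayHardVideoFilter extends OriginalHardVideoFilter {
    private static final String GRAY_FRAGMENT = "" +
            "precision highp float;\n" +
            "varying highp vec2 vCamTextureCoord;\n" +
            "uniform sampler2D uCamTexture;\n" +
            "void main(){\n" +
            "    vec4 color = texture2D(uCamTexture, vCamTextureCoord);\n" +
            "    float gray = dot(color.rgb, vec3(0.299, 0.587, 0.114));\n" +
            "    gl_FragColor = vec4(gray, gray, gray, color.a);\n" +
            "}";
    public GrayHardVideoFilter() {
        super(null, GRAY_FRAGMENT); // null keeps the default vertex shader
    }
}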

View File

@@ -0,0 +1,28 @@
package me.lake.librestreaming.filter.softaudiofilter;
public class BaseSoftAudioFilter {
protected int SIZE;
protected int SIZE_HALF;
public void onInit(int size) {
SIZE = size;
SIZE_HALF = size/2;
}
/**
*
* @param orignBuff
* @param targetBuff
* @param presentationTimeMs
* @param sequenceNum
* @return false to use orignBuff, true to use targetBuff
*/
public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
return false;
}
public void onDestroy() {
}
}
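A hedged example of a subclass: a simple gain filter over 16-bit little-endian PCM. The class name and gain handling are illustrative; it relies only on SIZE set in onInit() and the onFrame() contract above.
// Hypothetical example, not part of the library.
public class GainSoftAudioFilter extends BaseSoftAudioFilter {
    private final float gain;
    public GainSoftAudioFilter(float gain) {
        this.gain = gain;
    }
    @Override
    public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
        for (int i = 0; i + 1 < SIZE; i += 2) {
            short sample = (short) ((orignBuff[i] & 0xFF) | (orignBuff[i + 1] << 8));
            int scaled = (int) (sample * gain);
            if (scaled > Short.MAX_VALUE) scaled = Short.MAX_VALUE;
            if (scaled < Short.MIN_VALUE) scaled = Short.MIN_VALUE;
            targetBuff[i] = (byte) (scaled & 0xFF);
            targetBuff[i + 1] = (byte) ((scaled >> 8) & 0xFF);
        }
        return true; // true: the caller should use targetBuff
    }
}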

View File

@@ -0,0 +1,36 @@
package me.lake.librestreaming.filter.softvideofilter;
public class BaseSoftVideoFilter {
protected int SIZE_WIDTH;
protected int SIZE_HEIGHT;
protected int SIZE_Y;
protected int SIZE_TOTAL;
protected int SIZE_U;
protected int SIZE_UV;
public void onInit(int VWidth, int VHeight) {
SIZE_WIDTH = VWidth;
SIZE_HEIGHT = VHeight;
SIZE_Y = SIZE_HEIGHT * SIZE_WIDTH;
SIZE_UV = SIZE_HEIGHT * SIZE_WIDTH / 2;
SIZE_U = SIZE_UV / 2;
SIZE_TOTAL = SIZE_Y * 3 / 2;
}
/**
*
* @param orignBuff
* @param targetBuff
* @param presentationTimeMs
* @param sequenceNum
* @return false to use orignBuff, true to use targetBuff
*/
public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
return false;
}
public void onDestroy() {
}
}
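A hedged example subclass: a brightness filter that offsets the NV21 luma plane and copies the chroma plane through unchanged; the class name and offset handling are illustrative.
// Hypothetical example, not part of the library.
public class BrightnessSoftVideoFilter extends BaseSoftVideoFilter {
    private final int offset;
    public BrightnessSoftVideoFilter(int offset) {
        this.offset = offset;
    }
    @Override
    public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
        for (int i = 0; i < SIZE_Y; i++) {
            int y = (orignBuff[i] & 0xFF) + offset;
            targetBuff[i] = (byte) (y > 255 ? 255 : (y < 0 ? 0 : y));
        }
        System.arraycopy(orignBuff, SIZE_Y, targetBuff, SIZE_Y, SIZE_UV); // chroma copied untouched
        return true;
    }
}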

View File

@@ -0,0 +1,19 @@
package me.lake.librestreaming.model;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
public class MediaCodecGLWapper {
public EGLDisplay eglDisplay;
public EGLConfig eglConfig;
public EGLSurface eglSurface;
public EGLContext eglContext;
public int drawProgram;
public int drawTextureLoc;
public int drawPostionLoc;
public int drawTextureCoordLoc;
}

View File

@@ -0,0 +1,25 @@
package me.lake.librestreaming.model;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
public class OffScreenGLWapper {
public EGLDisplay eglDisplay;
public EGLConfig eglConfig;
public EGLSurface eglSurface;
public EGLContext eglContext;
public int cam2dProgram;
public int cam2dTextureMatrix;
public int cam2dTextureLoc;
public int cam2dPostionLoc;
public int cam2dTextureCoordLoc;
public int camProgram;
public int camTextureLoc;
public int camPostionLoc;
public int camTextureCoordLoc;
}

View File

@@ -0,0 +1,14 @@
package me.lake.librestreaming.model;
public class RESAudioBuff {
public boolean isReadyToFill;
public int audioFormat = -1;
public byte[] buff;
public RESAudioBuff(int audioFormat, int size) {
isReadyToFill = true;
this.audioFormat = audioFormat;
buff = new byte[size];
}
}

View File

@@ -0,0 +1,212 @@
package me.lake.librestreaming.model;
import android.hardware.Camera;
public class RESConfig {
public static class FilterMode {
public static final int HARD = RESCoreParameters.FILTER_MODE_HARD;
public static final int SOFT = RESCoreParameters.FILTER_MODE_SOFT;
}
public static class RenderingMode {
public static final int NativeWindow = RESCoreParameters.RENDERING_MODE_NATIVE_WINDOW;
public static final int OpenGLES = RESCoreParameters.RENDERING_MODE_OPENGLES;
}
public static class DirectionMode {
public static final int FLAG_DIRECTION_FLIP_HORIZONTAL = RESCoreParameters.FLAG_DIRECTION_FLIP_HORIZONTAL;
public static final int FLAG_DIRECTION_FLIP_VERTICAL = RESCoreParameters.FLAG_DIRECTION_FLIP_VERTICAL;
public static final int FLAG_DIRECTION_ROATATION_0 = RESCoreParameters.FLAG_DIRECTION_ROATATION_0;
public static final int FLAG_DIRECTION_ROATATION_90 = RESCoreParameters.FLAG_DIRECTION_ROATATION_90;
public static final int FLAG_DIRECTION_ROATATION_180 = RESCoreParameters.FLAG_DIRECTION_ROATATION_180;
public static final int FLAG_DIRECTION_ROATATION_270 = RESCoreParameters.FLAG_DIRECTION_ROATATION_270;
}
private int filterMode;
private Size targetVideoSize;
private int videoBufferQueueNum;
private int bitRate;
private String rtmpAddr;
private int renderingMode;
private int defaultCamera;
private int frontCameraDirectionMode;
private int backCameraDirectionMode;
private int videoFPS;
private int videoGOP;
private boolean printDetailMsg;
private Size targetPreviewSize;
private RESConfig() {
}
public static RESConfig obtain() {
RESConfig res = new RESConfig();
res.setFilterMode(FilterMode.SOFT);
res.setRenderingMode(RenderingMode.NativeWindow);
res.setTargetVideoSize(new Size(1920, 1080));
res.setVideoFPS(60);
res.setVideoGOP(1);
res.setVideoBufferQueueNum(1);
res.setBitRate(10*1024*1024);
res.setPrintDetailMsg(false);
res.setDefaultCamera(Camera.CameraInfo.CAMERA_FACING_BACK);
res.setBackCameraDirectionMode(DirectionMode.FLAG_DIRECTION_ROATATION_0);
res.setFrontCameraDirectionMode(DirectionMode.FLAG_DIRECTION_ROATATION_0);
return res;
}
/**
* set the filter mode.
*
* @param filterMode {@link FilterMode}
*/
public void setFilterMode(int filterMode) {
this.filterMode = filterMode;
}
/**
* set the default camera used when streaming starts
*/
public void setDefaultCamera(int defaultCamera) {
this.defaultCamera = defaultCamera;
}
/**
* set front camera rotation & flip
*
* @param frontCameraDirectionMode {@link DirectionMode}
*/
public void setFrontCameraDirectionMode(int frontCameraDirectionMode) {
this.frontCameraDirectionMode = frontCameraDirectionMode;
}
/**
* set back camera rotation & flip
*
* @param backCameraDirectionMode {@link DirectionMode}
*/
public void setBackCameraDirectionMode(int backCameraDirectionMode) {
this.backCameraDirectionMode = backCameraDirectionMode;
}
/**
* set the rendering mode used in soft filter mode<br/>
* has no effect in hard mode
*
* @param renderingMode {@link RenderingMode}
*/
public void setRenderingMode(int renderingMode) {
this.renderingMode = renderingMode;
}
/**
* not used for now
*
* @param printDetailMsg
*/
public void setPrintDetailMsg(boolean printDetailMsg) {
this.printDetailMsg = printDetailMsg;
}
/**
* set the target video size.<br/>
* the real video size may differ from it, depending on the device.
*
* @param videoSize
*/
public void setTargetVideoSize(Size videoSize) {
targetVideoSize = videoSize;
}
/**
* set video buffer number for soft mode.<br/>
* a larger number gives smoother video but uses more memory.
*
* @param num
*/
public void setVideoBufferQueueNum(int num) {
videoBufferQueueNum = num;
}
/**
* set video bitrate
*
* @param bitRate
*/
public void setBitRate(int bitRate) {
this.bitRate = bitRate;
}
public int getVideoFPS() {
return videoFPS;
}
public void setVideoFPS(int videoFPS) {
this.videoFPS = videoFPS;
}
public int getVideoGOP(){
return videoGOP;
}
public void setVideoGOP(int videoGOP){
this.videoGOP = videoGOP;
}
public int getVideoBufferQueueNum() {
return videoBufferQueueNum;
}
public int getBitRate() {
return bitRate;
}
public Size getTargetVideoSize() {
return targetVideoSize;
}
public int getFilterMode() {
return filterMode;
}
public int getDefaultCamera() {
return defaultCamera;
}
public int getBackCameraDirectionMode() {
return backCameraDirectionMode;
}
public int getFrontCameraDirectionMode() {
return frontCameraDirectionMode;
}
public int getRenderingMode() {
return renderingMode;
}
public String getRtmpAddr() {
return rtmpAddr;
}
public void setRtmpAddr(String rtmpAddr) {
this.rtmpAddr = rtmpAddr;
}
public boolean isPrintDetailMsg() {
return printDetailMsg;
}
public void setTargetPreviewSize(Size previewSize) {
targetPreviewSize = previewSize;
}
public Size getTargetPreviewSize() {
return targetPreviewSize;
}
}
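A short configuration sketch; the chosen values and the RTMP URL are placeholders, not recommendations, and the helper name is illustrative.
// Sketch only: all values are placeholders.
static RESConfig buildSampleConfig() {
    RESConfig config = RESConfig.obtain();              // defaults: soft filter, 1080p, 60fps
    config.setFilterMode(RESConfig.FilterMode.HARD);
    config.setTargetVideoSize(new Size(1280, 720));
    config.setVideoFPS(30);
    config.setVideoGOP(2);
    config.setBitRate(2 * 1024 * 1024);
    config.setRtmpAddr("rtmp://example.com/live/stream"); // placeholder address
    return config;
}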

View File

@@ -0,0 +1,119 @@
package me.lake.librestreaming.model;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import me.lake.librestreaming.tools.LogTools;
import me.lake.librestreaming.ws.StreamAVOption;
public class RESCoreParameters {
public static final int FILTER_MODE_HARD = 1;
public static final int FILTER_MODE_SOFT = 2;
public static final int RENDERING_MODE_NATIVE_WINDOW = 1;
public static final int RENDERING_MODE_OPENGLES = 2;
/**
* these values must match the JNI side
*/
public static final int FLAG_DIRECTION_FLIP_HORIZONTAL = 0x01;
public static final int FLAG_DIRECTION_FLIP_VERTICAL = 0x02;
public static final int FLAG_DIRECTION_ROATATION_0 = 0x10;
public static final int FLAG_DIRECTION_ROATATION_90 = 0x20;
public static final int FLAG_DIRECTION_ROATATION_180 = 0x40;
public static final int FLAG_DIRECTION_ROATATION_270 = 0x80;
public boolean done;
public boolean printDetailMsg;
public int filterMode;
public int renderingMode;
public String rtmpAddr;
public int frontCameraDirectionMode;
public int backCameraDirectionMode;
public boolean isPortrait;
public int previewVideoWidth;
public int previewVideoHeight;
public int videoWidth;
public int videoHeight;
public int videoFPS;
public int videoGOP;
public float cropRatio;
public int previewColorFormat;
public int previewBufferSize;
public int mediacodecAVCColorFormat;
public int mediacdoecAVCBitRate;
public int videoBufferQueueNum;
public int audioBufferQueueNum;
public int audioRecoderFormat;
public int audioRecoderSampleRate;
public int audioRecoderChannelConfig;
public int audioRecoderSliceSize;
public int audioRecoderSource;
public int audioRecoderBufferSize;
public int previewMaxFps;
public int previewMinFps;
public int mediacodecAVCFrameRate;
public int mediacodecAVCIFrameInterval;
public int mediacodecAVCProfile;
public int mediacodecAVClevel;
public int mediacodecAACProfile;
public int mediacodecAACSampleRate;
public int mediacodecAACChannelCount;
public int mediacodecAACBitRate;
public int mediacodecAACMaxInputSize;
//sender
public int senderQueueLength;
public RESCoreParameters() {
done = false;
printDetailMsg = false;
filterMode=-1;
videoWidth = StreamAVOption.videoWidth;
videoHeight = StreamAVOption.videoHeight;
previewVideoWidth = StreamAVOption.previewWidth;
previewVideoHeight = StreamAVOption.previewHeight;
videoFPS=StreamAVOption.videoFramerate;
videoGOP=StreamAVOption.videoGOP;
previewColorFormat = -1;
mediacodecAVCColorFormat = -1;
mediacdoecAVCBitRate = StreamAVOption.videoBitrate;
videoBufferQueueNum = -1;
audioBufferQueueNum = -1;
mediacodecAVCFrameRate = -1;
mediacodecAVCIFrameInterval = -1;
mediacodecAVCProfile = -1;
mediacodecAVClevel = -1;
mediacodecAACProfile = -1;
mediacodecAACSampleRate = -1;
mediacodecAACChannelCount = -1;
mediacodecAACBitRate = -1;
mediacodecAACMaxInputSize = -1;
}
public void dump() {
LogTools.e(this.toString());
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("ResParameter:");
Field[] fields = this.getClass().getDeclaredFields();
for (Field field : fields) {
if (Modifier.isStatic(field.getModifiers())) {
continue;
}
field.setAccessible(true);
try {
sb.append(field.getName());
sb.append('=');
sb.append(field.get(this));
sb.append(';');
} catch (IllegalAccessException e) {
}
}
return sb.toString();
}
}

View File

@@ -0,0 +1,18 @@
package me.lake.librestreaming.model;
import java.util.Arrays;
public class RESVideoBuff {
public boolean isReadyToFill;
public int colorFormat = -1;
public byte[] buff;
public RESVideoBuff(int colorFormat, int size) {
isReadyToFill = true;
this.colorFormat = colorFormat;
buff = new byte[size];
Arrays.fill(buff, size/2, size, (byte) 127);
}
}

View File

@@ -0,0 +1,19 @@
package me.lake.librestreaming.model;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
public class ScreenGLWapper {
public EGLDisplay eglDisplay;
public EGLConfig eglConfig;
public EGLSurface eglSurface;
public EGLContext eglContext;
public int drawProgram;
public int drawTextureLoc;
public int drawPostionLoc;
public int drawTextureCoordLoc;
}

View File

@@ -0,0 +1,86 @@
package me.lake.librestreaming.model;
public final class Size {
/**
* Create a new immutable Size instance.
*
* @param width The width of the size, in pixels
* @param height The height of the size, in pixels
*/
public Size(int width, int height) {
mWidth = width;
mHeight = height;
}
/**
* Get the width of the size (in pixels).
*
* @return width
*/
public int getWidth() {
return mWidth;
}
/**
* Get the height of the size (in pixels).
*
* @return height
*/
public int getHeight() {
return mHeight;
}
/**
* Check if this size is equal to another size.
* <p>
* Two sizes are equal if and only if both their widths and heights are
* equal.
* </p>
* <p>
* A size object is never equal to any other type of object.
* </p>
*
* @return {@code true} if the objects were equal, {@code false} otherwise
*/
@Override
public boolean equals(final Object obj) {
if (obj == null) {
return false;
}
if (this == obj) {
return true;
}
if (obj instanceof Size) {
Size other = (Size) obj;
return mWidth == other.mWidth && mHeight == other.mHeight;
}
return false;
}
/**
* Return the size represented as a string with the format {@code "WxH"}
*
* @return string representation of the size
*/
@Override
public String toString() {
return mWidth + "x" + mHeight;
}
private static NumberFormatException invalidSize(String s) {
throw new NumberFormatException("Invalid Size: \"" + s + "\"");
}
/**
* {@inheritDoc}
*/
@Override
public int hashCode() {
// assuming most sizes are <2^16, doing a rotate will give us perfect hashing
return mHeight ^ ((mWidth << (Integer.SIZE / 2)) | (mWidth >>> (Integer.SIZE / 2)));
}
private final int mWidth;
private final int mHeight;
}

View File

@@ -0,0 +1,378 @@
package me.lake.librestreaming.render;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import java.util.Arrays;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
import me.lake.librestreaming.tools.GLESTools;
public class GLESRender implements IRender {
private final Object syncRenderThread = new Object();
GLESRenderThread glesRenderThread;
@Override
public void create(SurfaceTexture visualSurfaceTexture, int pixelFormat, int pixelWidth, int pixelHeight, int visualWidth, int visualHeight) {
if (pixelFormat != ImageFormat.NV21) {
throw new IllegalArgumentException("GLESRender,pixelFormat only support NV21");
}
synchronized (syncRenderThread) {
glesRenderThread = new GLESRenderThread(visualSurfaceTexture,
pixelFormat,
pixelWidth,
pixelHeight,
visualWidth,
visualHeight);
glesRenderThread.start();
}
}
@Override
public void update(int visualWidth, int visualHeight) {
synchronized (syncRenderThread) {
glesRenderThread.updateVisualWH(visualWidth, visualHeight);
}
}
@Override
public void rendering(byte[] pixel) {
synchronized (syncRenderThread) {
glesRenderThread.updatePixel(pixel);
}
}
@Override
public void destroy(boolean releaseTexture) {
synchronized (syncRenderThread) {
glesRenderThread.quit(releaseTexture);
try {
glesRenderThread.join();
} catch (InterruptedException ignored) {
}
}
}
private static class GLESRenderThread extends Thread {
int mPixelWidth;
int mPixelHeight;
int mySize;
int mVisualWidth;
int mVisualHeight;
byte[] yTemp, uTemp, vTemp;
SurfaceTexture mVisualSurfaceTexture;
private final Object syncThread = new Object();
boolean quit = false;
boolean releaseTexture=true;
EGL10 mEgl;
EGLDisplay mEglDisplay;
EGLConfig mEglConfig;
EGLSurface mEglSurface;
EGLContext mEglContext;
int mProgram;
public GLESRenderThread(SurfaceTexture visualSurfaceTexture, int pixelFormat, int pixelWidth, int pixelHeight, int visualWidth, int visualHeight) {
quit = false;
mVisualSurfaceTexture = visualSurfaceTexture;
mPixelWidth = pixelWidth;
mPixelHeight = pixelHeight;
mySize = mPixelWidth * mPixelHeight;
mVisualWidth = visualWidth;
mVisualHeight = visualHeight;
yBuf = ByteBuffer.allocateDirect(mySize);
uBuf = ByteBuffer.allocateDirect(mySize >> 2);
vBuf = ByteBuffer.allocateDirect(mySize >> 2);
yTemp = new byte[mySize];
uTemp = new byte[mySize >> 2];
vTemp = new byte[mySize >> 2];
Arrays.fill(uTemp, (byte) 0x7F);
Arrays.fill(vTemp, (byte) 0x7F);
uBuf.position(0);
uBuf.put(uTemp).position(0);
vBuf.position(0);
vBuf.put(vTemp).position(0);
}
public void quit(boolean releaseTexture) {
synchronized (syncThread) {
this.releaseTexture = releaseTexture;
quit = true;
syncThread.notify();
}
}
public void updatePixel(byte[] pixel) {
synchronized (syncBuff) {
NV21TOYUV(pixel, yTemp, uTemp, vTemp, mPixelWidth, mPixelHeight);
yBuf.position(0);
yBuf.put(yTemp).position(0);
uBuf.position(0);
uBuf.put(uTemp).position(0);
vBuf.position(0);
vBuf.put(vTemp).position(0);
}
synchronized (syncThread) {
syncThread.notify();
}
}
public void updateVisualWH(int visualWidth, int visualHeight) {
mVisualWidth = visualWidth;
mVisualHeight = visualHeight;
}
@Override
public void run() {
initGLES();
mProgram = GLESTools.createProgram(vertexShaderCode, fragmentshaderCode);
initVertex();
initTexture();
while (!quit) {
drawFrame();
if (!mEgl.eglSwapBuffers(mEglDisplay, mEglSurface)) {
throw new RuntimeException("eglSwapBuffers,failed!");
}
synchronized (syncThread) {
try {
if(!quit) {
syncThread.wait();
}
} catch (InterruptedException ignored) {
}
}
}
releaseGLES();
if (releaseTexture) {
mVisualSurfaceTexture.release();
}
}
private void drawFrame() {
GLES20.glViewport(0, 0, mVisualWidth, mVisualHeight);
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glUseProgram(mProgram);
synchronized (syncBuff) {
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yTexture[0]);
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0,
mPixelWidth,
mPixelHeight,
GLES20.GL_LUMINANCE,
GLES20.GL_UNSIGNED_BYTE,
yBuf);
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, uTexture[0]);
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0,
mPixelWidth >> 1,
mPixelHeight >> 1,
GLES20.GL_LUMINANCE,
GLES20.GL_UNSIGNED_BYTE,
uBuf);
GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, vTexture[0]);
GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0,
mPixelWidth >> 1,
mPixelHeight >> 1,
GLES20.GL_LUMINANCE,
GLES20.GL_UNSIGNED_BYTE,
vBuf);
}
//=================================
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndices.length, GLES20.GL_UNSIGNED_SHORT, mDrawIndicesBuffer);
GLES20.glFinish();
}
private void initGLES() {
mEgl = (EGL10) EGLContext.getEGL();
mEglDisplay = mEgl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
if (EGL10.EGL_NO_DISPLAY == mEglDisplay) {
throw new RuntimeException("GLESRender,eglGetDisplay,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
int versions[] = new int[2];
if (!mEgl.eglInitialize(mEglDisplay, versions)) {
throw new RuntimeException("GLESRender,eglInitialize,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
int configsCount[] = new int[1];
EGLConfig configs[] = new EGLConfig[1];
int configSpec[] = new int[]{
EGL10.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
EGL10.EGL_RED_SIZE, 8,
EGL10.EGL_GREEN_SIZE, 8,
EGL10.EGL_BLUE_SIZE, 8,
EGL10.EGL_DEPTH_SIZE, 0,
EGL10.EGL_STENCIL_SIZE, 0,
EGL10.EGL_NONE
};
mEgl.eglChooseConfig(mEglDisplay, configSpec, configs, 1, configsCount);
if (configsCount[0] <= 0) {
throw new RuntimeException("GLESRender,eglChooseConfig,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
mEglConfig = configs[0];
mEglSurface = mEgl.eglCreateWindowSurface(mEglDisplay, mEglConfig, mVisualSurfaceTexture, null);
if (null == mEglSurface || EGL10.EGL_NO_SURFACE == mEglSurface) {
throw new RuntimeException("GLESRender,eglCreateWindowSurface,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
int contextSpec[] = new int[]{
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
EGL14.EGL_NONE
};
mEglContext = mEgl.eglCreateContext(mEglDisplay, mEglConfig, EGL10.EGL_NO_CONTEXT, contextSpec);
if (EGL10.EGL_NO_CONTEXT == mEglContext) {
throw new RuntimeException("GLESRender,eglCreateContext,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
if (!mEgl.eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext)) {
throw new RuntimeException("GLESRender,eglMakeCurrent,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
}
GLES20.glDisable(GLES20.GL_DEPTH_TEST);
}
private void initVertex() {
mSquareVerticesBuffer = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * squareVertices.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
mSquareVerticesBuffer.put(squareVertices);
mSquareVerticesBuffer.position(0);
mTextureCoordsBuffer = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * textureVertices.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
mTextureCoordsBuffer.put(textureVertices);
mTextureCoordsBuffer.position(0);
mDrawIndicesBuffer = ByteBuffer.allocateDirect(SHORT_SIZE_BYTES * drawIndices.length).
order(ByteOrder.nativeOrder()).
asShortBuffer();
mDrawIndicesBuffer.put(drawIndices);
mDrawIndicesBuffer.position(0);
}
private void initTexture() {
GLES20.glEnable(GLES20.GL_TEXTURE_2D);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
createTexture(mPixelWidth, mPixelHeight, GLES20.GL_LUMINANCE, yTexture);
createTexture(mPixelWidth >> 1, mPixelHeight >> 1, GLES20.GL_LUMINANCE, uTexture);
createTexture(mPixelWidth >> 1, mPixelHeight >> 1, GLES20.GL_LUMINANCE, vTexture);
GLES20.glUseProgram(mProgram);
sampleYLoaction = GLES20.glGetUniformLocation(mProgram, "samplerY");
sampleULoaction = GLES20.glGetUniformLocation(mProgram, "samplerU");
sampleVLoaction = GLES20.glGetUniformLocation(mProgram, "samplerV");
GLES20.glUniform1i(sampleYLoaction, 0);
GLES20.glUniform1i(sampleULoaction, 1);
GLES20.glUniform1i(sampleVLoaction, 2);
int aPostionLocation = GLES20.glGetAttribLocation(mProgram, "aPosition");
int aTextureCoordLocation = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
GLES20.glEnableVertexAttribArray(aPostionLocation);
GLES20.glVertexAttribPointer(aPostionLocation, SHAPE_COORD_PER_VERTEX,
GLES20.GL_FLOAT, false,
SHAPE_COORD_PER_VERTEX * 4, mSquareVerticesBuffer);
GLES20.glEnableVertexAttribArray(aTextureCoordLocation);
GLES20.glVertexAttribPointer(aTextureCoordLocation, TEXTURE_COORD_PER_VERTEX,
GLES20.GL_FLOAT, false,
TEXTURE_COORD_PER_VERTEX * 4, mTextureCoordsBuffer);
}
private void createTexture(int width, int height, int format, int[] texture) {
GLES20.glGenTextures(1, texture, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture[0]);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, format, width, height, 0, format, GLES20.GL_UNSIGNED_BYTE, null);
}
private void releaseGLES() {
GLES20.glDeleteProgram(mProgram);
GLES20.glDeleteTextures(1, yTexture, 0);
GLES20.glDeleteTextures(1, uTexture, 0);
GLES20.glDeleteTextures(1, vTexture, 0);
mEgl.eglMakeCurrent(mEglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
mEgl.eglDestroySurface(mEglDisplay, mEglSurface);
mEgl.eglDestroyContext(mEglDisplay, mEglContext);
mEgl.eglTerminate(mEglDisplay);
}
//Pixel Buff
private final Object syncBuff = new Object();
private ByteBuffer yBuf;
private ByteBuffer uBuf;
private ByteBuffer vBuf;
//texture
private int[] yTexture = new int[1];
private int[] uTexture = new int[1];
private int[] vTexture = new int[1];
private int sampleYLoaction;
private int sampleULoaction;
private int sampleVLoaction;
//shape vertices
private FloatBuffer mSquareVerticesBuffer;
private static float squareVertices[] = {
-1.0f, -1.0f, 0.0f,
1.0f, -1.0f, 0.0f,
1.0f, 1.0f, 0.0f,
-1.0f, 1.0f, 0.0f
};
//texture coordinate vertices
private FloatBuffer mTextureCoordsBuffer;
private static float textureVertices[] = {
0.0f, 1.0f,
1.0f, 1.0f,
1.0f, 0.0f,
0.0f, 0.0f
};
//gl draw order
private ShortBuffer mDrawIndicesBuffer;
private static short drawIndices[] = {0, 1, 2, 0, 2, 3};
private static int FLOAT_SIZE_BYTES = 4;
private static int SHORT_SIZE_BYTES = 2;
private static final int SHAPE_COORD_PER_VERTEX = 3;
private static final int TEXTURE_COORD_PER_VERTEX = 2;
private static String vertexShaderCode =
"attribute vec4 aPosition;\n" +
"attribute vec2 aTextureCoord;\n" +
"varying vec2 vTextureCoord;\n" +
"void main(){\n" +
" gl_Position= aPosition;\n" +
" vTextureCoord = aTextureCoord;\n" +
"}";
private static String fragmentshaderCode =
"varying lowp vec2 vTextureCoord;\n" +
"uniform sampler2D samplerY;\n" +
"uniform sampler2D samplerU;\n" +
"uniform sampler2D samplerV;\n" +
"const mediump mat3 yuv2rgb = mat3(1,1,1,0,-0.39465,2.03211,1.13983,-0.5806,0);\n" +
"void main(){\n" +
" mediump vec3 yuv;\n" +
" yuv.x = texture2D(samplerY,vTextureCoord).r;\n" +
" yuv.y = texture2D(samplerU,vTextureCoord).r - 0.5;\n" +
" yuv.z = texture2D(samplerV,vTextureCoord).r - 0.5;\n" +
" gl_FragColor = vec4(yuv2rgb*yuv,1);\n" +
"}";
}
@SuppressWarnings("all")
private static native void NV21TOYUV(byte[] src, byte[] dstY, byte[] dstU, byte[] dstV, int width, int height);
}

View File

@@ -0,0 +1,14 @@
package me.lake.librestreaming.render;
import android.graphics.SurfaceTexture;
public interface IRender {
void create(SurfaceTexture visualSurfaceTexture, int pixelFormat, int pixelWidth, int pixelHeight, int visualWidth, int visualHeight);
void update(int visualWidth, int visualHeight);
void rendering(byte[] pixel);
void destroy(boolean releaseTexture);
}

View File

@@ -0,0 +1,50 @@
package me.lake.librestreaming.render;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.view.Surface;
import me.lake.librestreaming.tools.LogTools;
public class NativeRender implements IRender {
Surface mVisualSurface;
int mPixelWidth;
int mPixelHeight;
int mPixelSize;
@Override
public void create(SurfaceTexture visualSurfaceTexture, int pixelFormat, int pixelWidth, int pixelHeight, int visualWidth, int visualHeight) {
if (pixelFormat != ImageFormat.NV21) {
throw new IllegalArgumentException("NativeRender,pixelFormat only support NV21");
}
mVisualSurface = new Surface(visualSurfaceTexture);
mPixelWidth = pixelWidth;
mPixelHeight = pixelHeight;
mPixelSize = (3 * pixelWidth * pixelHeight) / 2;
}
@Override
public void update(int visualWidth, int visualHeight) {
}
@Override
public void rendering(byte[] pixel) {
if (mVisualSurface != null && mVisualSurface.isValid()) {
renderingSurface(mVisualSurface, pixel, mPixelWidth, mPixelHeight, mPixelSize);
} else {
LogTools.d("NativeRender,rendering()invalid Surface");
}
}
@Override
public void destroy(boolean releaseTexture) {
if(releaseTexture) {
mVisualSurface.release();
}
}
@SuppressWarnings("all")
private native void renderingSurface(Surface surface, byte[] pixels, int w, int h, int s);
}

View File

@@ -0,0 +1,124 @@
package me.lake.librestreaming.rtmp;
import java.util.ArrayList;
import me.lake.librestreaming.model.RESCoreParameters;
/**
* This class is able to generate a FLVTAG in accordance with Adobe Flash Video File Format
* Specification v10.1 Annex E.5 with limited types available.
*/
public class FLvMetaData {
private static final String Name = "onMetaData";
private static final int ScriptData = 18;
private static final byte[] TS_SID = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};
private static final byte[] ObjEndMarker = {0x00, 0x00, 0x09};
private static final int EmptySize = 21;
private ArrayList<byte[]> MetaData;
private int DataSize;
private int pointer;
private byte[] MetaDataFrame;
public FLvMetaData() {
MetaData = new ArrayList<>();
DataSize = 0;
}
public FLvMetaData(RESCoreParameters coreParameters) {
this();
//Audio
//AAC
setProperty("audiocodecid", 10);
switch (coreParameters.mediacodecAACBitRate) {
case 32 * 1024:
setProperty("audiodatarate", 32);
break;
case 48 * 1024:
setProperty("audiodatarate", 48);
break;
case 64 * 1024:
setProperty("audiodatarate", 64);
break;
}
switch (coreParameters.mediacodecAACSampleRate) {
case 44100:
setProperty("audiosamplerate", 44100);
break;
default:
break;
}
//Video
//h264
setProperty("videocodecid", 7);
setProperty("framerate", coreParameters.mediacodecAVCFrameRate);
setProperty("width", coreParameters.videoWidth);
setProperty("height", coreParameters.videoHeight);
}
public void setProperty(String Key, int value) {
addProperty(toFlvString(Key), (byte) 0, toFlvNum(value));
}
public void setProperty(String Key, String value) {
addProperty(toFlvString(Key), (byte) 2, toFlvString(value));
}
private void addProperty(byte[] Key, byte datatype, byte[] data) {
int Propertysize = Key.length + 1 + data.length;
byte[] Property = new byte[Propertysize];
System.arraycopy(Key, 0, Property, 0, Key.length);
Property[Key.length] = datatype;
System.arraycopy(data, 0, Property, Key.length + 1, data.length);
MetaData.add(Property);
DataSize += Propertysize;
}
public byte[] getMetaData() {
MetaDataFrame = new byte[DataSize + EmptySize];
pointer = 0;
//SCRIPTDATA.name
Addbyte(2);
AddbyteArray(toFlvString(Name));
//SCRIPTDATA.value ECMA array
Addbyte(8);
AddbyteArray(toUI(MetaData.size(), 4));
for (byte[] Property : MetaData) {
AddbyteArray(Property);
}
AddbyteArray(ObjEndMarker);
return MetaDataFrame;
}
private void Addbyte(int value) {
MetaDataFrame[pointer] = (byte) value;
pointer++;
}
private void AddbyteArray(byte[] value) {
System.arraycopy(value, 0, MetaDataFrame, pointer, value.length);
pointer += value.length;
}
private byte[] toFlvString(String text) {
byte[] FlvString = new byte[text.length() + 2];
System.arraycopy(toUI(text.length(), 2), 0, FlvString, 0, 2);
System.arraycopy(text.getBytes(), 0, FlvString, 2, text.length());
return FlvString;
}
private byte[] toUI(long value, int bytes) {
byte[] UI = new byte[bytes];
for (int i = 0; i < bytes; i++) {
UI[bytes - 1 - i] = (byte) (value >> (8 * i) & 0xff);
}
return UI;
}
private byte[] toFlvNum(double value) {
long tmp = Double.doubleToLongBits(value);
return toUI(tmp, 8);
}
}
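A sketch of how the onMetaData tag is typically emitted as the first RTMP packet; this mirrors the flow in RESRtmpSender below, with coreParameters and jniRtmpPointer assumed to come from the caller.
// Sketch only: parameters are assumed to be prepared elsewhere.
static void sendMetaData(RESCoreParameters coreParameters, long jniRtmpPointer) {
    FLvMetaData fLvMetaData = new FLvMetaData(coreParameters);
    byte[] metaData = fLvMetaData.getMetaData();
    RtmpClient.write(jniRtmpPointer, metaData, metaData.length,
            RESFlvData.FLV_RTMP_PACKET_TYPE_INFO, 0);
}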

View File

@@ -0,0 +1,27 @@
package me.lake.librestreaming.rtmp;
public class RESFlvData {
public final static int FLV_RTMP_PACKET_TYPE_VIDEO = 9;
public final static int FLV_RTMP_PACKET_TYPE_AUDIO = 8;
public final static int FLV_RTMP_PACKET_TYPE_INFO = 18;
public final static int NALU_TYPE_IDR = 5;
public boolean droppable;
public int dts; // decoding timestamp
public byte[] byteBuffer; // payload data
public int size; // length in bytes
public int flvTagType; // tag type: video or audio
public int videoFrameType;
public boolean isKeyframe() {
return videoFrameType == NALU_TYPE_IDR;
}
}

View File

@@ -0,0 +1,6 @@
package me.lake.librestreaming.rtmp;
public interface RESFlvDataCollecter {
void collect(RESFlvData flvData, int type);
}

View File

@@ -0,0 +1,308 @@
package me.lake.librestreaming.rtmp;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import me.lake.librestreaming.client.CallbackDelivery;
import me.lake.librestreaming.core.RESByteSpeedometer;
import me.lake.librestreaming.core.RESFrameRateMeter;
import me.lake.librestreaming.core.listener.RESConnectionListener;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.tools.LogTools;
public class RESRtmpSender {
private static final int TIMEGRANULARITY = 3000;
public static final int FROM_AUDIO = 8;
public static final int FROM_VIDEO = 6;
private WorkHandler workHandler;
private HandlerThread workHandlerThread;
private final Object syncOp = new Object();
public void prepare(RESCoreParameters coreParameters) {
synchronized (syncOp) {
workHandlerThread = new HandlerThread("RESRtmpSender,workHandlerThread");
workHandlerThread.start();
workHandler = new WorkHandler(coreParameters.senderQueueLength,
new FLvMetaData(coreParameters),
workHandlerThread.getLooper());
}
}
public void setConnectionListener(RESConnectionListener connectionListener) {
synchronized (syncOp) {
workHandler.setConnectionListener(connectionListener);
}
}
public String getServerIpAddr() {
synchronized (syncOp) {
return workHandler == null ? null : workHandler.getServerIpAddr();
}
}
public float getSendFrameRate() {
synchronized (syncOp) {
return workHandler == null ? 0 : workHandler.getSendFrameRate();
}
}
public float getSendBufferFreePercent() {
synchronized (syncOp) {
return workHandler == null ? 0 : workHandler.getSendBufferFreePercent();
}
}
public void start(String rtmpAddr) {
synchronized (syncOp) {
workHandler.sendStart(rtmpAddr);
}
}
public void feed(RESFlvData flvData, int type) {
synchronized (syncOp) {
workHandler.sendFood(flvData, type);
}
}
public void stop() {
synchronized (syncOp) {
workHandler.sendStop();
}
}
public void destroy() {
synchronized (syncOp) {
workHandler.removeCallbacksAndMessages(null);
//workHandlerThread.quit();
workHandler.sendStop();
workHandlerThread.quitSafely();
/**
* do not wait for librtmp to quit
*/
// try {
// workHandlerThread.join();
// } catch (InterruptedException ignored) {
// }
}
}
public int getTotalSpeed() {
synchronized (syncOp) {
if (workHandler != null) {
return workHandler.getTotalSpeed();
} else {
return 0;
}
}
}
public WorkHandler getWorkHandler(){
return workHandler;
}
public static class WorkHandler extends Handler {
private final static int MSG_START = 1;
private final static int MSG_WRITE = 2;
private final static int MSG_STOP = 3;
private long jniRtmpPointer = 0;
private String serverIpAddr = null;
private int maxQueueLength;
private int writeMsgNum = 0;
private final Object syncWriteMsgNum = new Object();
private RESByteSpeedometer videoByteSpeedometer = new RESByteSpeedometer(TIMEGRANULARITY);
private RESByteSpeedometer audioByteSpeedometer = new RESByteSpeedometer(TIMEGRANULARITY);
private RESFrameRateMeter sendFrameRateMeter = new RESFrameRateMeter();
private FLvMetaData fLvMetaData;
private RESConnectionListener connectionListener;
private final Object syncConnectionListener = new Object();
private int errorTime = 0;
private enum STATE {
IDLE,
RUNNING,
STOPPED
}
private STATE state;
WorkHandler(int maxQueueLength, FLvMetaData fLvMetaData, Looper looper) {
super(looper);
this.maxQueueLength = maxQueueLength;
this.fLvMetaData = fLvMetaData;
state = STATE.IDLE;
}
public String getServerIpAddr() {
return serverIpAddr;
}
public float getSendFrameRate() {
return sendFrameRateMeter.getFps();
}
public float getSendBufferFreePercent() {
synchronized (syncWriteMsgNum) {
float res = (float) (maxQueueLength - writeMsgNum) / (float) maxQueueLength;
return res <= 0 ? 0f : res;
}
}
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case MSG_START:
if (state == STATE.RUNNING) {
break;
}
sendFrameRateMeter.reSet();
LogTools.d("RESRtmpSender,WorkHandler,tid=" + Thread.currentThread().getId());
jniRtmpPointer = RtmpClient.open((String) msg.obj, true);
final int openR = jniRtmpPointer == 0 ? 1 : 0;
if (openR == 0) {
serverIpAddr = RtmpClient.getIpAddr(jniRtmpPointer);
}
synchronized (syncConnectionListener) {
if (connectionListener != null) {
CallbackDelivery.i().post(new Runnable() {
@Override
public void run() {
connectionListener.onOpenConnectionResult(openR);
}
});
}
}
if (jniRtmpPointer == 0) {
break;
} else {
byte[] MetaData = fLvMetaData.getMetaData();
RtmpClient.write(jniRtmpPointer,
MetaData,
MetaData.length,
RESFlvData.FLV_RTMP_PACKET_TYPE_INFO, 0);
state = STATE.RUNNING;
}
break;
case MSG_STOP:
if (state == STATE.STOPPED || jniRtmpPointer == 0) {
break;
}
errorTime = 0;
final int closeR = RtmpClient.close(jniRtmpPointer);
serverIpAddr = null;
synchronized (syncConnectionListener) {
if (connectionListener != null) {
CallbackDelivery.i().post(new Runnable() {
@Override
public void run() {
connectionListener.onCloseConnectionResult(closeR);
}
});
}
}
state = STATE.STOPPED;
break;
case MSG_WRITE:
synchronized (syncWriteMsgNum) {
--writeMsgNum;
}
if (state != STATE.RUNNING) {
break;
}
if(mListener!=null){
mListener.getBufferFree(getSendBufferFreePercent());
}
RESFlvData flvData = (RESFlvData) msg.obj;
if (writeMsgNum >= (maxQueueLength * 3 / 4) && flvData.flvTagType == RESFlvData.FLV_RTMP_PACKET_TYPE_VIDEO && flvData.droppable) {
LogTools.d("senderQueue is crowded,abandon video");
break;
}
final int res = RtmpClient.write(jniRtmpPointer, flvData.byteBuffer, flvData.byteBuffer.length, flvData.flvTagType, flvData.dts);
if (res == 0) {
errorTime = 0;
if (flvData.flvTagType == RESFlvData.FLV_RTMP_PACKET_TYPE_VIDEO) {
videoByteSpeedometer.gain(flvData.size);
sendFrameRateMeter.count();
} else {
audioByteSpeedometer.gain(flvData.size);
}
} else {
++errorTime;
synchronized (syncConnectionListener) {
if (connectionListener != null) {
CallbackDelivery.i().post(new RESConnectionListener.RESWriteErrorRunable(connectionListener, res));
}
}
}
break;
default:
break;
}
}
public void sendStart(String rtmpAddr) {
this.removeMessages(MSG_START);
synchronized (syncWriteMsgNum) {
this.removeMessages(MSG_WRITE);
writeMsgNum = 0;
}
this.sendMessage(this.obtainMessage(MSG_START, rtmpAddr));
}
public void sendStop() {
this.removeMessages(MSG_STOP);
synchronized (syncWriteMsgNum) {
this.removeMessages(MSG_WRITE);
writeMsgNum = 0;
}
this.sendEmptyMessage(MSG_STOP);
}
public void sendFood(RESFlvData flvData, int type) {
synchronized (syncWriteMsgNum) {
//LAKETODO optimize
if (writeMsgNum <= maxQueueLength) {
this.sendMessage(this.obtainMessage(MSG_WRITE, type, 0, flvData));
++writeMsgNum;
} else {
LogTools.d("senderQueue is full,abandon");
}
}
}
public void setConnectionListener(RESConnectionListener connectionListener) {
synchronized (syncConnectionListener) {
this.connectionListener = connectionListener;
}
}
public int getTotalSpeed() {
return getVideoSpeed() + getAudioSpeed();
}
public int getVideoSpeed() {
return videoByteSpeedometer.getSpeed();
}
public int getAudioSpeed() {
return audioByteSpeedometer.getSpeed();
}
private BufferFreeListener mListener=null;
public interface BufferFreeListener{
void getBufferFree(float free);
}
public void setBufferFreeListener(BufferFreeListener listener){
mListener=listener;
}
}
}

View File

@@ -0,0 +1,25 @@
package me.lake.librestreaming.rtmp;
public class RtmpClient {
static {
System.loadLibrary("resrtmp");
}
/**
 * @param url           the RTMP URL to connect to
 * @param isPublishMode true to open the connection for publishing, false for playback
 * @return rtmpPointer, pointer to the native rtmp struct; 0 if the connection failed
 */
public static native long open(String url, boolean isPublishMode);
public static native int read(long rtmpPointer, byte[] data, int offset, int size);
public static native int write(long rtmpPointer, byte[] data, int size, int type, int ts);
public static native int close(long rtmpPointer);
public static native String getIpAddr(long rtmpPointer);
}
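// A minimal usage sketch (not part of this commit): the open -> write -> close sequence that
// RESRtmpSender's WorkHandler drives through this native interface. The URL is a placeholder
// and error handling is reduced to the same null-pointer check the sender performs.
class RtmpClientUsageSketch {
    static void publishOneTag(byte[] flvTag, int flvTagType, int dts) {
        long rtmpPointer = RtmpClient.open("rtmp://example.com/live/stream", true); // true = publish mode
        if (rtmpPointer == 0) {
            return; // connection failed, mirrors the openR check in WorkHandler
        }
        String serverIp = RtmpClient.getIpAddr(rtmpPointer); // the address the sender logs/exposes
        RtmpClient.write(rtmpPointer, flvTag, flvTag.length, flvTagType, dts);
        RtmpClient.close(rtmpPointer);
    }
}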

View File

@@ -0,0 +1,19 @@
package me.lake.librestreaming.tools;
import android.graphics.ImageFormat;
import android.media.MediaCodecInfo;
public class BuffSizeCalculator {
public static int calculator(int width, int height, int colorFormat) {
switch (colorFormat) {
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
case ImageFormat.NV21:
case ImageFormat.YV12:
return width * height * 3 / 2;
default:
return -1;
}
}
}
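// For illustration only: sizing an NV21 preview buffer for a 1280x720 frame with the helper
// above (YUV 4:2:0 needs width * height * 3 / 2 bytes, here 1,382,400).
class BuffSizeCalculatorUsageSketch {
    static byte[] allocPreviewBuffer() {
        int size = BuffSizeCalculator.calculator(1280, 720, ImageFormat.NV21);
        return new byte[size];
    }
}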

View File

@@ -0,0 +1,16 @@
package me.lake.librestreaming.tools;
public class ByteArrayTools {
public static void intToByteArrayFull(byte[] dst, int pos, int integer) {
    dst[pos] = (byte) ((integer >> 24) & 0xFF);
    dst[pos + 1] = (byte) ((integer >> 16) & 0xFF);
    dst[pos + 2] = (byte) ((integer >> 8) & 0xFF);
    dst[pos + 3] = (byte) ((integer) & 0xFF);
}
public static void intToByteArrayTwoByte(byte[] dst, int pos, int integer) {
    dst[pos] = (byte) ((integer >> 8) & 0xFF);
    dst[pos + 1] = (byte) ((integer) & 0xFF);
}
}
}
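// For illustration only: the helpers above write big-endian (network byte order) values,
// e.g. 0x11223344 becomes {0x11, 0x22, 0x33, 0x44}.
class ByteArrayToolsUsageSketch {
    static byte[] packTagSize(int tagSize) {
        byte[] header = new byte[4];
        ByteArrayTools.intToByteArrayFull(header, 0, tagSize); // header[0] holds the most significant byte
        return header;
    }
}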

View File

@@ -0,0 +1,205 @@
package me.lake.librestreaming.tools;
import android.app.Activity;
import android.content.Context;
import android.content.res.Configuration;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.Log;
import android.view.Surface;
import java.lang.ref.WeakReference;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
* Created by WangShuo on 2017/6/30.
*/
public class CameraUtil {
private static final String tag = "wangshuo";
private CameraSizeComparator sizeComparator = new CameraSizeComparator();
private CameraSizeComparator2 sizeComparator2 = new CameraSizeComparator2();
private static CameraUtil cameraUtil = null;
public static boolean hasSupportedFrontVideoSizes = true;
private CameraUtil(){
}
public static CameraUtil getInstance(){
if(cameraUtil == null){
cameraUtil = new CameraUtil();
return cameraUtil;
}
else{
return cameraUtil;
}
}
public Size getBestSize(List<Size> list, int th){
if(list == null || list.size() < 1){
return null;
}
boolean bool= false;
Collections.sort(list, sizeComparator2);
int i = 0;
for(Size s:list){
if((s.width < th) && (s.width > 350) && equalRate(s, 1.7777f)){
Log.i(tag, "最终设置Video尺寸:w = " + s.width + "h = " + s.height);
bool = true;
break;
}
i++;
}
if(bool){
return list.get(i);
}
return null;
}
public Size getBestPreviewSize(List<Size> list, int th){
if(list == null || list.size() < 1){
return null;
}
boolean bool= false;
Collections.sort(list, sizeComparator);
int i = 0;
for(Size s:list){
if((s.width > th) && equalRate(s, 1.7777f)){
Log.i(tag, "最终设置预览尺寸:w = " + s.width + "h = " + s.height);
bool = true;
break;
}
i++;
}
if(bool){
return list.get(i);
}
return null;
}
public boolean equalRate(Size s, float rate){
float r = (float)(s.width)/(float)(s.height);
if(Math.abs(r - rate) <= 0.2)
{
return true;
}
else{
return false;
}
}
public class CameraSizeComparator implements Comparator<Size> {
//sort ascending by width
public int compare(Size lhs, Size rhs) {
// TODO Auto-generated method stub
if(lhs.width == rhs.width){
return 0;
}
else if(lhs.width > rhs.width){
return 1;
}
else{
return -1;
}
}
}
public class CameraSizeComparator2 implements Comparator<Size> {
//sort descending by width
public int compare(Size lhs, Size rhs) {
// TODO Auto-generated method stub
if(lhs.width == rhs.width){
return 0;
}
else if(lhs.width < rhs.width){
return 1;
}
else{
return -1;
}
}
}
public static List<Size> getBackCameraPreviewSize(){
Camera back = Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
List<Size> backSizeList = back.getParameters().getSupportedPreviewSizes();
back.release();
return backSizeList;
}
public static List<Size> getFrontCameraPreviewSize(){
Camera front = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
List<Size> frontSizeList = front.getParameters().getSupportedPreviewSizes();
front.release();
return frontSizeList;
}
public static List<Size> getBackCameraVideoSize(){
Camera back = Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
List<Size> backSizeList = back.getParameters().getSupportedVideoSizes();
back.release();
return backSizeList;
}
public static List<Size> getFrontCameraVideoSize(){
Camera front = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
List<Size> frontSizeList = front.getParameters().getSupportedVideoSizes();
front.release();
return frontSizeList;
}
public static List<Size> getFrontCameraSize(){
Camera front = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
List<Size> frontSizeList = front.getParameters().getSupportedVideoSizes();
if(null == frontSizeList || frontSizeList.size()<=0){
frontSizeList = front.getParameters().getSupportedPreviewSizes();
hasSupportedFrontVideoSizes = false;
Log.e(tag,"getSupportedVideoSizes==null");
}
front.release();
return frontSizeList;
}
/**
 * Determines whether the current device is a phone or a tablet; code from the Google I/O App for Android.
 * @param context
 * @return true for a tablet, false for a phone
 */
public static boolean isTablet(Context context) {
return (context.getResources().getConfiguration().screenLayout & Configuration.SCREENLAYOUT_SIZE_MASK) >= Configuration.SCREENLAYOUT_SIZE_LARGE;
}
public static void setCameraDisplayOrientation(Activity activity,
int cameraId, android.hardware.Camera camera) {
android.hardware.Camera.CameraInfo info =
new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(cameraId, info);
int rotation = activity.getWindowManager().getDefaultDisplay()
.getRotation();
int degrees = 0;
switch (rotation) {
case Surface.ROTATION_0: degrees = 0; break;
case Surface.ROTATION_90: degrees = 90; break;
case Surface.ROTATION_180: degrees = 180; break;
case Surface.ROTATION_270: degrees = 270; break;
}
int result;
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
result = (info.orientation + degrees) % 360;
result = (360 - result) % 360; // compensate the mirror
} else { // back-facing
result = (info.orientation - degrees + 360) % 360;
}
//result=180;
System.out.println("设置相机角度:"+result);
camera.setDisplayOrientation(result);
}
}
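// A hedged sketch of how these helpers are usually combined when opening a camera; the
// 800-pixel threshold mirrors the value StreamLiveCameraView passes further below, and
// "activity" is assumed to be the hosting Activity.
class CameraUtilUsageSketch {
    static Camera openCamera(Activity activity, int cameraId) {
        Camera camera = Camera.open(cameraId);
        Camera.Parameters params = camera.getParameters();
        Size preview = CameraUtil.getInstance().getBestPreviewSize(params.getSupportedPreviewSizes(), 800);
        if (preview != null) {
            params.setPreviewSize(preview.width, preview.height); // first ~16:9 size wider than 800
            camera.setParameters(params);
        }
        CameraUtil.setCameraDisplayOrientation(activity, cameraId, camera);
        return camera;
    }
}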

View File

@@ -0,0 +1,125 @@
package me.lake.librestreaming.tools;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
public class GLESTools {
public static int FLOAT_SIZE_BYTES = 4;
public static int SHORT_SIZE_BYTES = 2;
public static String readTextFile(Resources res, int resId) {
InputStream inputStream = res.openRawResource(resId);
BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
String line;
StringBuilder result = new StringBuilder();
try {
while ((line = br.readLine()) != null) {
result.append(line);
result.append("\n");
}
} catch (Exception e) {
e.printStackTrace();
return null;
}
return result.toString();
}
public static int createProgram(Resources res, int vertexShaderResId, int fragmentShaderResId) {
String vertexShaderCode = readTextFile(res, vertexShaderResId);
String fragmentShaderCode = readTextFile(res, fragmentShaderResId);
return createProgram(vertexShaderCode, fragmentShaderCode);
}
public static int createProgram(String vertexShaderCode, String fragmentShaderCode) {
if (vertexShaderCode == null || fragmentShaderCode == null) {
throw new RuntimeException("invalid shader code");
}
int vertexShader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
int fragmentShader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(vertexShader, vertexShaderCode);
GLES20.glShaderSource(fragmentShader, fragmentShaderCode);
int[] status = new int[1];
GLES20.glCompileShader(vertexShader);
GLES20.glGetShaderiv(vertexShader, GLES20.GL_COMPILE_STATUS, status, 0);
if (GLES20.GL_FALSE == status[0]) {
throw new RuntimeException("vertext shader compile,failed:" + GLES20.glGetShaderInfoLog(vertexShader));
}
GLES20.glCompileShader(fragmentShader);
GLES20.glGetShaderiv(fragmentShader, GLES20.GL_COMPILE_STATUS, status, 0);
if (GLES20.GL_FALSE == status[0]) {
throw new RuntimeException("fragment shader compile,failed:" + GLES20.glGetShaderInfoLog(fragmentShader));
}
int program = GLES20.glCreateProgram();
GLES20.glAttachShader(program, vertexShader);
GLES20.glAttachShader(program, fragmentShader);
GLES20.glLinkProgram(program);
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0);
if (GLES20.GL_FALSE == status[0]) {
throw new RuntimeException("link program,failed:" + GLES20.glGetProgramInfoLog(program));
}
return program;
}
public static void checkGlError(String op) {
int error = GLES20.glGetError();
if (error != GLES20.GL_NO_ERROR) {
String msg = op + ": glError 0x" + Integer.toHexString(error);
LogTools.d(msg);
throw new RuntimeException(msg);
}
}
public static final int NO_TEXTURE = -1;
public static int loadTexture(final Bitmap image, final int reUseTexture) {
int[] texture = new int[1];
if (reUseTexture == NO_TEXTURE) {
GLES20.glGenTextures(1, texture, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture[0]);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, image, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
} else {
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, reUseTexture);
GLUtils.texSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, image);
texture[0] = reUseTexture;
}
return texture[0];
}
public static void createFrameBuff(int[] frameBuffer, int[] frameBufferTex, int width, int height) {
GLES20.glGenFramebuffers(1, frameBuffer, 0);
GLES20.glGenTextures(1, frameBufferTex, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameBufferTex[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLESTools.checkGlError("createCamFrameBuff");
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer[0]);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, frameBufferTex[0], 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLESTools.checkGlError("createCamFrameBuff");
}
}
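// A minimal sketch, assuming a current GL context on the calling thread (e.g. inside a
// renderer callback); the shader sources are trivial placeholders.
class GLESToolsUsageSketch {
    static int buildTrivialProgram() {
        String vertexShader = "attribute vec4 aPosition;\n" +
                "void main(){ gl_Position = aPosition; }";
        String fragmentShader = "precision mediump float;\n" +
                "void main(){ gl_FragColor = vec4(1.0); }";
        int program = GLESTools.createProgram(vertexShader, fragmentShader); // throws on compile/link failure
        GLESTools.checkGlError("createProgram");
        return program;
    }
}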

View File

@@ -0,0 +1,72 @@
package me.lake.librestreaming.tools;
import android.util.Log;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.net.UnknownHostException;
public class LogTools {
protected static final String TAG = "RESLog";
private static boolean enableLog = false;
public static boolean isEnableLog() {
return enableLog;
}
public static void setEnableLog(boolean enableLog) {
LogTools.enableLog = enableLog;
}
public static void e(String content) {
if (!enableLog) {
return;
}
Log.e(TAG, content);
}
public static void d(String content) {
if (!enableLog) {
return;
}
Log.d(TAG, content);
}
public static void trace(String msg) {
if (!enableLog) {
return;
}
trace(msg, new Throwable());
}
public static void trace(Throwable e) {
if (!enableLog) {
return;
}
trace(null, e);
}
public static void trace(String msg, Throwable e) {
if (!enableLog) {
return;
}
if (null == e || e instanceof UnknownHostException) {
return;
}
final Writer writer = new StringWriter();
final PrintWriter pWriter = new PrintWriter(writer);
e.printStackTrace(pWriter);
String stackTrace = writer.toString();
if (null == msg || msg.equals("")) {
msg = "================error!==================";
}
Log.e(TAG, "==================================");
Log.e(TAG, msg);
Log.e(TAG, stackTrace);
Log.e(TAG, "-----------------------------------");
}
}

View File

@@ -0,0 +1,91 @@
package me.lake.librestreaming.ws;
import android.content.Context;
import android.util.AttributeSet;
import android.view.TextureView;
import android.view.View;
public class AspectTextureView extends TextureView {
public static final int MODE_FITXY = 0;
public static final int MODE_INSIDE = 1;
public static final int MODE_OUTSIDE = 2;
private double targetAspect = -1;
private int aspectMode = MODE_OUTSIDE;
public AspectTextureView(Context context) {
super(context);
}
public AspectTextureView(Context context, AttributeSet attrs) {
super(context, attrs);
}
public AspectTextureView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
/**
* @param mode {@link #MODE_FITXY},{@link #MODE_INSIDE},{@link #MODE_OUTSIDE}
* @param aspectRatio width/height
*/
public void setAspectRatio(int mode, double aspectRatio) {
if (mode != MODE_INSIDE && mode != MODE_OUTSIDE && mode != MODE_FITXY) {
throw new IllegalArgumentException("illegal mode");
}
if (aspectRatio < 0) {
throw new IllegalArgumentException("illegal aspect ratio");
}
if (targetAspect != aspectRatio || aspectMode != mode) {
targetAspect = aspectRatio;
aspectMode = mode;
requestLayout();
}
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
if (targetAspect > 0) {
int initialWidth = MeasureSpec.getSize(widthMeasureSpec);
int initialHeight = MeasureSpec.getSize(heightMeasureSpec);
double viewAspectRatio = (double) initialWidth / initialHeight;
double aspectDiff = targetAspect / viewAspectRatio - 1;
if (Math.abs(aspectDiff) > 0.01 && aspectMode != MODE_FITXY) {
if (aspectMode == MODE_INSIDE) {
if (aspectDiff > 0) {
initialHeight = (int) (initialWidth / targetAspect);
} else {
initialWidth = (int) (initialHeight * targetAspect);
}
} else if (aspectMode == MODE_OUTSIDE) {
if (aspectDiff > 0) {
initialWidth = (int) (initialHeight * targetAspect);
} else {
initialHeight = (int) (initialWidth / targetAspect);
}
}
widthMeasureSpec = MeasureSpec.makeMeasureSpec(initialWidth, MeasureSpec.EXACTLY);
heightMeasureSpec = MeasureSpec.makeMeasureSpec(initialHeight, MeasureSpec.EXACTLY);
}
}
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
@Override
public void layout(int l, int t, int r, int b) {
View p = (View) getParent();
if (p != null) {
int pw = p.getMeasuredWidth();
int ph = p.getMeasuredHeight();
int w = getMeasuredWidth();
int h = getMeasuredHeight();
t = (ph - h) / 2;
l = (pw - w) / 2;
r += l;
b += t;
}
super.layout(l, t, r, b);
}
}
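// For illustration, assuming a 16:9 video source: MODE_OUTSIDE fills the parent and crops,
// while MODE_INSIDE letterboxes instead.
class AspectTextureViewUsageSketch {
    static AspectTextureView create(Context context) {
        AspectTextureView view = new AspectTextureView(context);
        view.setAspectRatio(AspectTextureView.MODE_OUTSIDE, 16.0 / 9.0); // aspectRatio is width / height
        return view;
    }
}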

View File

@@ -0,0 +1,21 @@
package me.lake.librestreaming.ws;
/**
* Created by WangShuo on 2017/6/11.
*/
public class StreamAVOption {
public static int cameraIndex = StreamConfig.AVOptionsHolder.DEFAULT_CAMERA_INDEX;//front or back camera
public static int previewWidth = StreamConfig.AVOptionsHolder.DEFAULT_PREVIEW_WIDTH;//preview width
public static int previewHeight = StreamConfig.AVOptionsHolder.DEFAULT_PREVIEW_HEIGHT;//preview height
public static int videoWidth = StreamConfig.AVOptionsHolder.DEFAULT_VIDEO_WIDTH;//streamed video width
public static int videoHeight = StreamConfig.AVOptionsHolder.DEFAULT_VIDEO_HEIGHT;//streamed video height
public static int videoBitrate = StreamConfig.AVOptionsHolder.DEFAULT_VIDEO_BITRATE;//bitrate
public static int videoFramerate = StreamConfig.AVOptionsHolder.DEFAULT_VIDEO_FPS;//frame rate
public static int videoGOP = StreamConfig.AVOptionsHolder.DEFAULT_VIDEO_GOP;//GOP, keyframe interval
public String streamUrl = "";
public static int recordVideoWidth = 1920;//recorded video width
public static int recordVideoHeight = 1080;//recorded video height
}

View File

@@ -0,0 +1,74 @@
package me.lake.librestreaming.ws;
import android.content.Context;
import android.content.res.Configuration;
import android.hardware.Camera;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.Size;
import static me.lake.librestreaming.ws.StreamConfig.AVOptionsHolder.DEFAULT_FILTER_MODE;
import static me.lake.librestreaming.ws.StreamConfig.AVOptionsHolder.DEFAULT_RENDER_MODE;
/**
* Created by WangShuo on 2017/6/11.
*/
public class StreamConfig {
public static class AVOptionsHolder {
public static final int DEFAULT_CAMERA_INDEX = Camera.CameraInfo.CAMERA_FACING_BACK;
public static final int DEFAULT_FILTER_MODE = RESConfig.FilterMode.HARD;
public static final int DEFAULT_RENDER_MODE = RESConfig.RenderingMode.NativeWindow;
public static final int DEFAULT_PREVIEW_WIDTH = 1920;
public static final int DEFAULT_PREVIEW_HEIGHT = 1080;
public static final int DEFAULT_VIDEO_WIDTH = 1920;
public static final int DEFAULT_VIDEO_HEIGHT = 1080;
public static final int DEFAULT_VIDEO_BITRATE =20*1024*1024;
public static final int DEFAULT_VIDEO_FPS = 30;
public static final int DEFAULT_VIDEO_GOP = 1;
}
public static RESConfig build(Context context, StreamAVOption option) {
RESConfig res = RESConfig.obtain();
res.setFilterMode(DEFAULT_FILTER_MODE);
res.setRenderingMode(DEFAULT_RENDER_MODE);
res.setTargetPreviewSize(new Size(option.previewWidth,option.previewHeight));
res.setTargetVideoSize(new Size(option.videoWidth, option.videoHeight));
res.setBitRate(option.videoBitrate);
res.setVideoFPS(option.videoFramerate);
res.setVideoGOP(option.videoGOP);
res.setDefaultCamera(option.cameraIndex);
res.setRtmpAddr(option.streamUrl);
int frontDirection, backDirection;
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
Camera.getCameraInfo(Camera.CameraInfo.CAMERA_FACING_FRONT, cameraInfo);
frontDirection = cameraInfo.orientation;
Camera.getCameraInfo(Camera.CameraInfo.CAMERA_FACING_BACK, cameraInfo);
backDirection = cameraInfo.orientation;
if (context.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
System.out.println("竖屏");
res.setFrontCameraDirectionMode((frontDirection == 90 ? RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_270 : RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_90) | RESConfig.DirectionMode.FLAG_DIRECTION_FLIP_HORIZONTAL);
res.setBackCameraDirectionMode((backDirection == 90 ? RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_90 : RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_90) );
} else {
System.out.println("横屏");
res.setBackCameraDirectionMode((backDirection == 90 ? RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_0 : RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_180));
res.setFrontCameraDirectionMode((frontDirection == 90 ? RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_180 : RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_0) | RESConfig.DirectionMode.FLAG_DIRECTION_FLIP_HORIZONTAL);
}
return res;
}
}
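// A hedged sketch of building a RESConfig from the default options; the RTMP address is a
// placeholder.
class StreamConfigUsageSketch {
    static RESConfig buildDefault(Context context) {
        StreamAVOption option = new StreamAVOption();
        option.streamUrl = "rtmp://example.com/live/stream"; // placeholder address
        return StreamConfig.build(context, option); // direction flags follow the current screen orientation
    }
}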

View File

@@ -0,0 +1,413 @@
package me.lake.librestreaming.ws;
import android.app.Activity;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Gravity;
import android.view.TextureView;
import android.widget.FrameLayout;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import me.lake.librestreaming.client.RESClient;
import me.lake.librestreaming.core.listener.RESConnectionListener;
import me.lake.librestreaming.core.listener.RESScreenShotListener;
import me.lake.librestreaming.core.listener.RESVideoChangeListener;
import me.lake.librestreaming.encoder.MediaAudioEncoder;
import me.lake.librestreaming.encoder.MediaEncoder;
import me.lake.librestreaming.encoder.MediaMuxerWrapper;
import me.lake.librestreaming.encoder.MediaVideoEncoder;
import me.lake.librestreaming.filter.hardvideofilter.BaseHardVideoFilter;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.Size;
import me.lake.librestreaming.tools.CameraUtil;
import me.lake.librestreaming.ws.filter.audiofilter.SetVolumeAudioFilter;
/**
* Created by WangShuo on 2017/6/11.
*/
public class StreamLiveCameraView extends FrameLayout {
private static final String TAG = "StreamLiveCameraView";
private Context mContext;
private Activity activity;
private AspectTextureView textureView;
private final List<RESConnectionListener> outerStreamStateListeners = new ArrayList<>();
private static RESClient resClient;
private static RESConfig resConfig;
private static int quality_value_min = 400 * 1024;
private static int quality_value_max = 700 * 1024;
public StreamLiveCameraView(Context context) {
super(context);
this.mContext=context;
}
public StreamLiveCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
this.mContext=context;
}
public static synchronized RESClient getRESClient() {
if (resClient == null) {
resClient = new RESClient();
}
return resClient;
}
public void setActivity(Activity activity) {
this.activity = activity;
}
/**
 * Initialize from the given AVOption and open the camera preview.
 * @param avOption
 */
public void init(Context context , StreamAVOption avOption) {
if (avOption == null) {
throw new IllegalArgumentException("AVOption is null.");
}
compatibleSize(avOption);
resClient = getRESClient();
setContext(mContext);
resConfig = StreamConfig.build(context,avOption);
boolean isSucceed = resClient.prepare(resConfig);
if (!isSucceed) {
Log.w(TAG, "推流prepare方法返回false, 状态异常.");
return;
}
initPreviewTextureView();
addListenerAndFilter();
}
private void compatibleSize(StreamAVOption avOptions) {
Camera.Size cameraSize = CameraUtil.getInstance().getBestSize(CameraUtil.getFrontCameraSize(),Integer.parseInt("800"));
if(!CameraUtil.hasSupportedFrontVideoSizes){
if(null == cameraSize || cameraSize.width <= 0){
avOptions.videoWidth = 720;
avOptions.videoHeight = 480;
}else{
avOptions.videoWidth = cameraSize.width;
avOptions.videoHeight = cameraSize.height;
}
}
}
private void initPreviewTextureView() {
if (textureView == null && resClient != null) {
textureView = new AspectTextureView(getContext());
LayoutParams params = new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
params.gravity = Gravity.CENTER;
this.removeAllViews();
this.addView(textureView);
textureView.setKeepScreenOn(true);
textureView.setSurfaceTextureListener(surfaceTextureListenerImpl);
Size s = resClient.getVideoSize();
textureView.setAspectRatio(AspectTextureView.MODE_OUTSIDE, ((double) s.getWidth() / s.getHeight()));
}
}
private void addListenerAndFilter() {
if (resClient != null) {
resClient.setConnectionListener(ConnectionListener);
resClient.setVideoChangeListener(VideoChangeListener);
resClient.setSoftAudioFilter(new SetVolumeAudioFilter());
}
}
/**
* Whether the client is currently streaming
*/
public boolean isStreaming(){
if(resClient != null){
return resClient.isStreaming;
}
return false;
}
/**
* Start streaming to the given RTMP URL
*/
public void startStreaming(String rtmpUrl){
if(resClient != null){
resClient.startStreaming(rtmpUrl);
}
}
/**
* Stop streaming
*/
public void stopStreaming(){
if(resClient != null){
resClient.stopStreaming();
}
}
/**
* Start recording to a local MP4 file
*/
private MediaMuxerWrapper mMuxer;
private boolean isRecord = false;
public void startRecord(){
if(resClient != null){
resClient.setNeedResetEglContext(true);
try {
mMuxer = new MediaMuxerWrapper(".mp4"); // if you record audio only, ".m4a" is also OK.
new MediaVideoEncoder(mMuxer, mMediaEncoderListener, StreamAVOption.recordVideoWidth, StreamAVOption.recordVideoHeight);
new MediaAudioEncoder(mMuxer, mMediaEncoderListener);
mMuxer.prepare();
mMuxer.startRecording();
isRecord = true;
} catch (IOException e) {
isRecord = false;
e.printStackTrace();
}
}
}
/**
* Stop recording and return the path of the recorded file
*/
public String stopRecord() {
isRecord = false;
if (mMuxer != null) {
String path = mMuxer.getFilePath();
mMuxer.stopRecording();
mMuxer = null;
System.gc();
return path;
}
System.gc();
return null;
}
/**
* Whether recording is in progress
*/
public boolean isRecord() {
return isRecord;
}
/**
* Switch between the front and back camera
*/
public void swapCamera(){
if(resClient != null){
resClient.swapCamera();
}
}
/**
* Camera zoom, as a percentage in [0.0f, 1.0f]
*/
public void setZoomByPercent(float targetPercent){
if(resClient != null){
resClient.setZoomByPercent(targetPercent);
}
}
/**
* Toggle the camera flashlight
*/
public void toggleFlashLight(){
if(resClient != null){
resClient.toggleFlashLight();
}
}
/**
* Reset the video frame rate while streaming
*/
public void reSetVideoFPS(int fps){
if(resClient != null){
resClient.reSetVideoFPS(fps);
}
}
/**
* Reset the video bitrate while streaming
*/
public void reSetVideoBitrate(int bitrate){
if(resClient != null){
resClient.reSetVideoBitrate(bitrate);
}
}
/**
* Take a screenshot of the current frame
*/
public void takeScreenShot(RESScreenShotListener listener){
if(resClient != null){
resClient.takeScreenShot(listener);
}
}
/**
* Mirroring
* @param isEnableMirror master switch for the mirror feature
* @param isEnablePreviewMirror whether to mirror the preview
* @param isEnableStreamMirror whether to mirror the pushed stream
*/
public void setMirror(boolean isEnableMirror,boolean isEnablePreviewMirror,boolean isEnableStreamMirror) {
if(resClient != null) {
resClient.setMirror(isEnableMirror, isEnablePreviewMirror, isEnableStreamMirror);
}
}
/**
* Set a hardware video filter
*/
public void setHardVideoFilter(BaseHardVideoFilter baseHardVideoFilter){
if(resClient != null){
resClient.setHardVideoFilter(baseHardVideoFilter);
}
}
/**
* Get the free percentage of the send buffer
*/
public float getSendBufferFreePercent() {
return resClient.getSendBufferFreePercent();
}
/**
* AVSpeed: current streaming (upload) speed, which depends on the network
*/
public int getAVSpeed() {
return resClient.getAVSpeed();
}
/**
* Set the context used by the client
*/
public void setContext(Context context){
if(resClient != null){
resClient.setContext(context);
}
}
/**
* destroy
*/
public void destroy(){
if (resClient != null) {
resClient.setConnectionListener(null);
resClient.setVideoChangeListener(null);
if(resClient.isStreaming){
resClient.stopStreaming();
}
if(isRecord()){
stopRecord();
}
resClient.destroy();
}
}
/**
* Add a stream state (connection) listener
* @param listener
*/
public void addStreamStateListener(RESConnectionListener listener) {
if (listener != null && !outerStreamStateListeners.contains(listener)) {
outerStreamStateListeners.add(listener);
}
}
public void setCreamAr(){
resClient.setCreamAr();
}
RESConnectionListener ConnectionListener =new RESConnectionListener() {
@Override
public void onOpenConnectionResult(int result) {
if(result == 1){
resClient.stopStreaming();
}
for (RESConnectionListener listener: outerStreamStateListeners) {
listener.onOpenConnectionResult(result);
}
}
@Override
public void onWriteError(int errno) {
for (RESConnectionListener listener: outerStreamStateListeners) {
listener.onWriteError(errno);
}
}
@Override
public void onCloseConnectionResult(int result) {
for (RESConnectionListener listener: outerStreamStateListeners) {
listener.onCloseConnectionResult(result);
}
}
};
RESVideoChangeListener VideoChangeListener = new RESVideoChangeListener() {
@Override
public void onVideoSizeChanged(int width, int height) {
if(textureView != null) {
textureView.setAspectRatio(AspectTextureView.MODE_INSIDE, ((double) width) / height);
}
}
};
TextureView.SurfaceTextureListener surfaceTextureListenerImpl = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
if (resClient != null) {
resClient.startPreview(surface, width, height);
CameraUtil.setCameraDisplayOrientation(activity,resClient.getCameraId(),resClient.getCamera());
}
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
if (resClient != null) {
resClient.updatePreview(width, height);
}
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
if (resClient != null) {
resClient.stopPreview(true);
}
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
}
};
/**
* callback methods from encoder
*/
MediaEncoder.MediaEncoderListener mMediaEncoderListener = new MediaEncoder.MediaEncoderListener() {
@Override
public void onPrepared(final MediaEncoder encoder) {
if (encoder instanceof MediaVideoEncoder && resClient != null)
resClient.setVideoEncoder((MediaVideoEncoder) encoder);
}
@Override
public void onStopped(final MediaEncoder encoder) {
if (encoder instanceof MediaVideoEncoder && resClient != null)
resClient.setVideoEncoder(null);
}
};
}
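// A minimal wiring sketch (not part of this commit): "cameraView" is assumed to come from the
// host layout, CAMERA/RECORD_AUDIO permissions are assumed granted, and the RTMP URL is a
// placeholder. startStreaming would normally be triggered later, e.g. from a button click.
class StreamLiveCameraViewUsageSketch {
    static void setUp(Activity activity, StreamLiveCameraView cameraView) {
        cameraView.setActivity(activity); // needed for setCameraDisplayOrientation in the preview callback
        StreamAVOption option = new StreamAVOption();
        option.streamUrl = "rtmp://example.com/live/stream"; // placeholder
        cameraView.init(activity, option); // prepares the RESClient and opens the preview
    }

    static void start(StreamLiveCameraView cameraView, String rtmpUrl) {
        if (!cameraView.isStreaming()) {
            cameraView.startStreaming(rtmpUrl);
        }
    }

    static void tearDown(StreamLiveCameraView cameraView) {
        cameraView.destroy(); // stops streaming/recording and releases the client
    }
}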

View File

@@ -0,0 +1,65 @@
package me.lake.librestreaming.ws.filter.audiofilter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import me.lake.librestreaming.filter.softaudiofilter.BaseSoftAudioFilter;
public class PcmBgmAudioFilter extends BaseSoftAudioFilter {
FileInputStream fis;
String filePath;
byte[] bgm;
public PcmBgmAudioFilter(String filepath) {
filePath = filepath;
}
@Override
public void onInit(int size) {
super.onInit(size);
try {
fis = new FileInputStream(new File(filePath));
} catch (FileNotFoundException e) {
e.printStackTrace();
}
try {
fis.mark(fis.available());
} catch (IOException e) {
}
bgm = new byte[SIZE];
}
@Override
public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
try {
if (fis.read(bgm, 0, SIZE) < SIZE) {
return false;
}
} catch (IOException e) {
e.printStackTrace();
}
for (int i = 0; i < SIZE; i += 2) {
short origin = (short) (((orignBuff[i + 1] << 8) | orignBuff[i] & 0xff));
short bg = (short) (((bgm[i + 1] << 8) | bgm[i] & 0xff));
bg /= 32;
origin *=4;
short res = (short) (origin + bg);
targetBuff[i + 1] = (byte) (res >> 8);
targetBuff[i] = (byte) (res);
}
return true;
}
@Override
public void onDestroy() {
super.onDestroy();
try {
fis.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
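// A hedged usage sketch: the file path is a placeholder and must point to raw 16-bit PCM that
// matches the stream's sample rate, since onFrame mixes it sample-by-sample with the mic input.
class PcmBgmAudioFilterUsageSketch {
    static BaseSoftAudioFilter create() {
        return new PcmBgmAudioFilter("/sdcard/bgm.pcm"); // placeholder path, pass to setSoftAudioFilter(...)
    }
}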

View File

@@ -0,0 +1,29 @@
package me.lake.librestreaming.ws.filter.audiofilter;
import me.lake.librestreaming.filter.softaudiofilter.BaseSoftAudioFilter;
public class SetVolumeAudioFilter extends BaseSoftAudioFilter {
private float volumeScale=1.0f;
public SetVolumeAudioFilter() {
}
/**
* @param scale volume scale factor, 0.0 and up; 1.0 keeps the original volume
*/
public void setVolumeScale(float scale) {
volumeScale = scale;
}
@Override
public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
for (int i = 0; i < SIZE; i += 2) {
short origin = (short) (((orignBuff[i + 1] << 8) | orignBuff[i] & 0xff));
origin = (short) (origin * volumeScale);
orignBuff[i + 1] = (byte) (origin >> 8);
orignBuff[i] = (byte) (origin);
}
return false;
}
}
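// For illustration: a filter configured to half volume; it would be installed through
// RESClient.setSoftAudioFilter, as StreamLiveCameraView does with its default instance above.
class SetVolumeAudioFilterUsageSketch {
    static SetVolumeAudioFilter createHalfVolume() {
        SetVolumeAudioFilter filter = new SetVolumeAudioFilter();
        filter.setVolumeScale(0.5f); // halve the microphone volume
        return filter;
    }
}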

View File

@@ -0,0 +1,43 @@
package me.lake.librestreaming.ws.filter.hardfilter;
import me.lake.librestreaming.filter.hardvideofilter.OriginalHardVideoFilter;
public class FishEyeFilterHard extends OriginalHardVideoFilter {
private static String FRAGMENTSHADER = "" +
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"uniform sampler2D uCamTexture;\n" +
"varying mediump vec2 vCamTextureCoord;\n" +
"const mediump float PI = 3.1415926535;\n" +
"const mediump float aperture = 180.0;\n" +
"const mediump float apertureHalf = 0.5 * aperture * (PI / 180.0);\n" +
"const mediump float maxFactor = sin(apertureHalf);\n" +
"void main(){\n" +
" vec2 pos = 2.0 * vCamTextureCoord.st - 1.0;\n" +
" float l = length(pos);\n" +
" if (l > 1.0) {\n" +
" gl_FragColor = vec4(0.0,0.0,0.0,1);\n" +
" }\n" +
" else {\n" +
" float x = maxFactor * pos.x;\n" +
" float y = maxFactor * pos.y;\n" +
" float n = length(vec2(x, y));\n" +
" float z = sqrt(1.0 - n * n);\n" +
" float r = atan(n, z) / PI;\n" +
" float phi = atan(y, x);\n" +
" float u = r * cos(phi) + 0.5;\n" +
" float v = r * sin(phi) + 0.5;\n" +
" gl_FragColor = texture2D(uCamTexture,vec2(u,v));\n" +
" }\n" +
"}";
public FishEyeFilterHard() {
super(null, FRAGMENTSHADER);
}
@Override
public void onInit(int VWidth, int VHeight) {
super.onInit(VWidth, VHeight);
}
}

View File

@@ -0,0 +1,190 @@
package me.lake.librestreaming.ws.filter.hardfilter;
import android.opengl.GLES20;
import jp.co.cyberagent.android.gpuimage.GPUImageFilter;
/**
* @author WangShuo
*/
public class GPUImageBeautyFilter extends GPUImageFilter {
public static final String BILATERAL_FRAGMENT_SHADER = "" +
"precision highp float;\n"+
" varying highp vec2 textureCoordinate;\n" +
"\n" +
" uniform sampler2D inputImageTexture;\n" +
"\n" +
" uniform highp vec2 singleStepOffset;\n" +
" uniform highp vec4 params;\n" +
" uniform highp float brightness;\n" +
"\n" +
" const highp vec3 W = vec3(0.299, 0.587, 0.114);\n" +
" const highp mat3 saturateMatrix = mat3(\n" +
" 1.1102, -0.0598, -0.061,\n" +
" -0.0774, 1.0826, -0.1186,\n" +
" -0.0228, -0.0228, 1.1772);\n" +
" highp vec2 blurCoordinates[24];\n" +
"\n" +
" highp float hardLight(highp float color) {\n" +
" if (color <= 0.5)\n" +
" color = color * color * 2.0;\n" +
" else\n" +
" color = 1.0 - ((1.0 - color)*(1.0 - color) * 2.0);\n" +
" return color;\n" +
"}\n" +
"\n" +
" void main(){\n" +
" highp vec3 centralColor = texture2D(inputImageTexture, textureCoordinate).rgb;\n" +
" blurCoordinates[0] = textureCoordinate.xy + singleStepOffset * vec2(0.0, -10.0);\n" +
" blurCoordinates[1] = textureCoordinate.xy + singleStepOffset * vec2(0.0, 10.0);\n" +
" blurCoordinates[2] = textureCoordinate.xy + singleStepOffset * vec2(-10.0, 0.0);\n" +
" blurCoordinates[3] = textureCoordinate.xy + singleStepOffset * vec2(10.0, 0.0);\n" +
" blurCoordinates[4] = textureCoordinate.xy + singleStepOffset * vec2(5.0, -8.0);\n" +
" blurCoordinates[5] = textureCoordinate.xy + singleStepOffset * vec2(5.0, 8.0);\n" +
" blurCoordinates[6] = textureCoordinate.xy + singleStepOffset * vec2(-5.0, 8.0);\n" +
" blurCoordinates[7] = textureCoordinate.xy + singleStepOffset * vec2(-5.0, -8.0);\n" +
" blurCoordinates[8] = textureCoordinate.xy + singleStepOffset * vec2(8.0, -5.0);\n" +
" blurCoordinates[9] = textureCoordinate.xy + singleStepOffset * vec2(8.0, 5.0);\n" +
" blurCoordinates[10] = textureCoordinate.xy + singleStepOffset * vec2(-8.0, 5.0);\n" +
" blurCoordinates[11] = textureCoordinate.xy + singleStepOffset * vec2(-8.0, -5.0);\n" +
" blurCoordinates[12] = textureCoordinate.xy + singleStepOffset * vec2(0.0, -6.0);\n" +
" blurCoordinates[13] = textureCoordinate.xy + singleStepOffset * vec2(0.0, 6.0);\n" +
" blurCoordinates[14] = textureCoordinate.xy + singleStepOffset * vec2(6.0, 0.0);\n" +
" blurCoordinates[15] = textureCoordinate.xy + singleStepOffset * vec2(-6.0, 0.0);\n" +
" blurCoordinates[16] = textureCoordinate.xy + singleStepOffset * vec2(-4.0, -4.0);\n" +
" blurCoordinates[17] = textureCoordinate.xy + singleStepOffset * vec2(-4.0, 4.0);\n" +
" blurCoordinates[18] = textureCoordinate.xy + singleStepOffset * vec2(4.0, -4.0);\n" +
" blurCoordinates[19] = textureCoordinate.xy + singleStepOffset * vec2(4.0, 4.0);\n" +
" blurCoordinates[20] = textureCoordinate.xy + singleStepOffset * vec2(-2.0, -2.0);\n" +
" blurCoordinates[21] = textureCoordinate.xy + singleStepOffset * vec2(-2.0, 2.0);\n" +
" blurCoordinates[22] = textureCoordinate.xy + singleStepOffset * vec2(2.0, -2.0);\n" +
" blurCoordinates[23] = textureCoordinate.xy + singleStepOffset * vec2(2.0, 2.0);\n" +
"\n" +
" highp float sampleColor = centralColor.g * 22.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[0]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[1]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[2]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[3]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[4]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[5]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[6]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[7]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[8]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[9]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[10]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[11]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[12]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[13]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[14]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[15]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[16]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[17]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[18]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[19]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[20]).g * 3.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[21]).g * 3.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[22]).g * 3.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[23]).g * 3.0;\n" +
"\n" +
" sampleColor = sampleColor / 62.0;\n" +
"\n" +
" highp float highPass = centralColor.g - sampleColor + 0.5;\n" +
"\n" +
" for (int i = 0; i < 5; i++) {\n" +
" highPass = hardLight(highPass);\n" +
" }\n" +
" highp float lumance = dot(centralColor, W);\n" +
"\n" +
" highp float alpha = pow(lumance, params.r);\n" +
"\n" +
" highp vec3 smoothColor = centralColor + (centralColor-vec3(highPass))*alpha*0.1;\n" +
"\n" +
" smoothColor.r = clamp(pow(smoothColor.r, params.g), 0.0, 1.0);\n" +
" smoothColor.g = clamp(pow(smoothColor.g, params.g), 0.0, 1.0);\n" +
" smoothColor.b = clamp(pow(smoothColor.b, params.g), 0.0, 1.0);\n" +
"\n" +
" highp vec3 lvse = vec3(1.0)-(vec3(1.0)-smoothColor)*(vec3(1.0)-centralColor);\n" +
" highp vec3 bianliang = max(smoothColor, centralColor);\n" +
" highp vec3 rouguang = 2.0*centralColor*smoothColor + centralColor*centralColor - 2.0*centralColor*centralColor*smoothColor;\n" +
"\n" +
" gl_FragColor = vec4(mix(centralColor, lvse, alpha), 1.0);\n" +
" gl_FragColor.rgb = mix(gl_FragColor.rgb, bianliang, alpha);\n" +
" gl_FragColor.rgb = mix(gl_FragColor.rgb, rouguang, params.b);\n" +
"\n" +
" highp vec3 satcolor = gl_FragColor.rgb * saturateMatrix;\n" +
" gl_FragColor.rgb = mix(gl_FragColor.rgb, satcolor, params.a);\n" +
" gl_FragColor.rgb = vec3(gl_FragColor.rgb + vec3(brightness));\n" +
"}";
private float toneLevel;
private float beautyLevel;
private float brightLevel;
private int paramsLocation;
private int brightnessLocation;
private int singleStepOffsetLocation;
public GPUImageBeautyFilter() {
super(NO_FILTER_VERTEX_SHADER, BILATERAL_FRAGMENT_SHADER);
toneLevel = -0.5f;
beautyLevel = 0.8f;
brightLevel = 0.3f;
}
@Override
public void onInit() {
super.onInit();
paramsLocation = GLES20.glGetUniformLocation(getProgram(), "params");
brightnessLocation = GLES20.glGetUniformLocation(getProgram(), "brightness");
singleStepOffsetLocation = GLES20.glGetUniformLocation(getProgram(), "singleStepOffset");
setParams(beautyLevel, toneLevel);
setBrightLevel(brightLevel);
}
//skin smoothing
public void setBeautyLevel(float beautyLevel) {
this.beautyLevel = beautyLevel;
setParams(beautyLevel, toneLevel);
}
//brightening (whitening)
public void setBrightLevel(float brightLevel) {
this.brightLevel = brightLevel;
setFloat(brightnessLocation, 0.6f * (-0.5f + brightLevel));
}
//tone (rosiness)
public void setToneLevel(float toneLevel) {
this.toneLevel = toneLevel;
setParams(beautyLevel, toneLevel);
}
public void setAllBeautyParams(float beauty,float bright,float tone) {
setBeautyLevel(beauty);
setBrightLevel(bright);
setToneLevel(tone);
}
public void setParams(float beauty, float tone) {
float[] vector = new float[4];
vector[0] = 1.0f - 0.6f * beauty;
vector[1] = 1.0f - 0.3f * beauty;
vector[2] = 0.1f + 0.3f * tone;
vector[3] = 0.1f + 0.3f * tone;
setFloatVec4(paramsLocation, vector);
}
private void setTexelSize(final float w, final float h) {
setFloatVec2(singleStepOffsetLocation, new float[] {2.0f / w, 2.0f / h});
}
@Override
public void onOutputSizeChanged(final int width, final int height) {
super.onOutputSizeChanged(width, height);
setTexelSize(width, height);
}
}

View File

@@ -0,0 +1,176 @@
package me.lake.librestreaming.ws.filter.hardfilter;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build;
import java.nio.FloatBuffer;
import me.lake.librestreaming.filter.hardvideofilter.BaseHardVideoFilter;
import me.lake.librestreaming.tools.GLESTools;
public class WatermarkFilter extends BaseHardVideoFilter {
protected int glProgram;
protected int glCamTextureLoc;
protected int glCamPostionLoc;
protected int glCamTextureCoordLoc;
protected int glImageTextureLoc;
protected int glImageRectLoc;
protected String vertexShader_filter = "" +
"attribute vec4 aCamPosition;\n" +
"attribute vec2 aCamTextureCoord;\n" +
"varying vec2 vCamTextureCoord;\n" +
"void main(){\n" +
" gl_Position= aCamPosition;\n" +
" vCamTextureCoord = aCamTextureCoord;\n" +
"}";
protected String fragmentshader_filter = "" +
"precision mediump float;\n" +
"varying mediump vec2 vCamTextureCoord;\n" +
"uniform sampler2D uCamTexture;\n" +
"uniform sampler2D uImageTexture;\n" +
"uniform vec4 imageRect;\n" +
"void main(){\n" +
" lowp vec4 c1 = texture2D(uCamTexture, vCamTextureCoord);\n" +
" lowp vec2 vCamTextureCoord2 = vec2(vCamTextureCoord.x,1.0-vCamTextureCoord.y);\n" +
" if(vCamTextureCoord2.x>imageRect.r && vCamTextureCoord2.x<imageRect.b && vCamTextureCoord2.y>imageRect.g && vCamTextureCoord2.y<imageRect.a)\n" +
" {\n" +
" vec2 imagexy = vec2((vCamTextureCoord2.x-imageRect.r)/(imageRect.b-imageRect.r),(vCamTextureCoord2.y-imageRect.g)/(imageRect.a-imageRect.g));\n" +
" lowp vec4 c2 = texture2D(uImageTexture, imagexy);\n" +
" lowp vec4 outputColor = c2+c1*c1.a*(1.0-c2.a);\n" +
" outputColor.a = 1.0;\n" +
" gl_FragColor = outputColor;\n" +
" }else\n" +
" {\n" +
" gl_FragColor = c1;\n" +
" }\n" +
"}";
protected int imageTexture = GLESTools.NO_TEXTURE;
protected final Object syncBitmap = new Object();
protected Bitmap iconBitmap;
protected boolean needUpdate;
protected RectF iconRectF;
protected Rect iconRect;
public WatermarkFilter(Bitmap _bitmap, Rect _rect) {
iconBitmap = _bitmap;
needUpdate = true;
iconRectF = new RectF();
iconRect = _rect;
}
protected WatermarkFilter() {
iconBitmap = null;
needUpdate = false;
iconRectF = new RectF(0,0,0,0);
}
public void updateIcon(Bitmap _bitmap, Rect _rect) {
synchronized (syncBitmap) {
if (_bitmap != null) {
iconBitmap = _bitmap;
needUpdate = true;
}
if (_rect != null) {
iconRect = _rect;
}
}
}
public void updateText(float textSize,String text){
updateIcon(fromText(textSize, text),iconRect);
}
public static Bitmap fromText(float textSize, String text) {
text=" "+text+" ";
Paint paint = new Paint();
paint.setTextSize(textSize);
paint.setTextAlign(Paint.Align.LEFT);
paint.setColor(Color.WHITE);
Paint.FontMetricsInt fm = paint.getFontMetricsInt();
int width = (int)paint.measureText(text);
int height = fm.descent - fm.ascent;
Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(bitmap);
canvas.drawARGB(80,255,255,255);
canvas.drawText(text, 0, fm.leading - fm.ascent, paint);
canvas.save();
return bitmap;
}
@Override
public void onInit(int VWidth, int VHeight) {
super.onInit(VWidth, VHeight);
glProgram = GLESTools.createProgram(vertexShader_filter, fragmentshader_filter);
GLES20.glUseProgram(glProgram);
glCamTextureLoc = GLES20.glGetUniformLocation(glProgram, "uCamTexture");
glImageTextureLoc = GLES20.glGetUniformLocation(glProgram, "uImageTexture");
glCamPostionLoc = GLES20.glGetAttribLocation(glProgram, "aCamPosition");
glCamTextureCoordLoc = GLES20.glGetAttribLocation(glProgram, "aCamTextureCoord");
glImageRectLoc = GLES20.glGetUniformLocation(glProgram, "imageRect");
}
@Override
public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textrueBuffer) {
synchronized (syncBitmap) {
if (needUpdate) {
if (imageTexture != GLESTools.NO_TEXTURE) {
GLES20.glDeleteTextures(1, new int[]{imageTexture}, 0);
}
imageTexture = GLESTools.loadTexture(iconBitmap, GLESTools.NO_TEXTURE);
}
}
iconRectF.top = iconRect.top / (float) SIZE_HEIGHT;
iconRectF.bottom = iconRect.bottom / (float) SIZE_HEIGHT;
iconRectF.left = iconRect.left / (float) SIZE_WIDTH;
iconRectF.right = iconRect.right / (float) SIZE_WIDTH;
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer);
GLES20.glUseProgram(glProgram);
GLES20.glUniform4f(glImageRectLoc, iconRectF.left, iconRectF.top, iconRectF.right, iconRectF.bottom);
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, cameraTexture);
GLES20.glUniform1i(glCamTextureLoc, 0);
GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, imageTexture);
GLES20.glUniform1i(glImageTextureLoc, 1);
GLES20.glEnableVertexAttribArray(glCamPostionLoc);
GLES20.glEnableVertexAttribArray(glCamTextureCoordLoc);
shapeBuffer.position(0);
GLES20.glVertexAttribPointer(glCamPostionLoc, 2,
GLES20.GL_FLOAT, false,
2 * 4, shapeBuffer);
textrueBuffer.position(0);
GLES20.glVertexAttribPointer(glCamTextureCoordLoc, 2,
GLES20.GL_FLOAT, false,
2 * 4, textrueBuffer);
GLES20.glViewport(0, 0, SIZE_WIDTH, SIZE_HEIGHT);
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer);
GLES20.glFinish();
GLES20.glDisableVertexAttribArray(glCamPostionLoc);
GLES20.glDisableVertexAttribArray(glCamTextureCoordLoc);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
GLES20.glUseProgram(0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
@Override
public void onDestroy() {
super.onDestroy();
GLES20.glDeleteProgram(glProgram);
GLES20.glDeleteTextures(1, new int[]{imageTexture}, 0);
}
}
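// A minimal sketch of building a text watermark for the bottom-left of a 1080p stream; the
// coordinates are illustrative video-pixel values, and the returned filter would be installed
// via setHardVideoFilter(...).
class WatermarkFilterUsageSketch {
    static WatermarkFilter createTextWatermark() {
        Bitmap label = WatermarkFilter.fromText(36f, "librestreaming");
        Rect position = new Rect(40, 980, 40 + label.getWidth(), 980 + label.getHeight()); // bottom-left area
        return new WatermarkFilter(label, position);
    }
}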

View File

@@ -0,0 +1,136 @@
package me.lake.librestreaming.ws.filter.hardfilter.extra;
import android.opengl.GLES20;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import jp.co.cyberagent.android.gpuimage.GPUImageFilter;
import me.lake.librestreaming.filter.hardvideofilter.BaseHardVideoFilter;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.tools.GLESTools;
public class GPUImageCompatibleFilter<T extends GPUImageFilter> extends BaseHardVideoFilter {
private T innerGPUImageFilter;
private FloatBuffer innerShapeBuffer;
private FloatBuffer innerTextureBuffer;
public GPUImageCompatibleFilter(T filter) {
innerGPUImageFilter = filter;
}
public T getGPUImageFilter() {
return innerGPUImageFilter;
}
@Override
public void onInit(int VWidth, int VHeight) {
super.onInit(VWidth, VHeight);
innerGPUImageFilter.init();
innerGPUImageFilter.onOutputSizeChanged(VWidth, VHeight);
}
@Override
public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textrueBuffer) {
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer);
innerGPUImageFilter.onDraw(cameraTexture, innerShapeBuffer, innerTextureBuffer);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
@Override
public void onDestroy() {
super.onDestroy();
innerGPUImageFilter.destroy();
}
@Override
public void onDirectionUpdate(int _directionFlag) {
if (directionFlag != _directionFlag) {
innerShapeBuffer = getGPUImageCompatShapeVerticesBuffer();
innerTextureBuffer = getGPUImageCompatTextureVerticesBuffer(directionFlag);
}
}
public static final float TEXTURE_NO_ROTATION[] = {
1.0f, 1.0f,
0.0f, 1.0f,
1.0f, 0.0f,
0.0f, 0.0f,
};
public static final float TEXTURE_ROTATED_90[] = {
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 1.0f,
1.0f, 0.0f,
};
public static final float TEXTURE_ROTATED_180[] = {
0.0f, 0.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
};
public static final float TEXTURE_ROTATED_270[] = {
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 0.0f,
0.0f, 1.0f,
};
static final float CUBE[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
public static FloatBuffer getGPUImageCompatShapeVerticesBuffer() {
FloatBuffer result = ByteBuffer.allocateDirect(GLESTools.FLOAT_SIZE_BYTES * CUBE.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
result.put(CUBE);
result.position(0);
return result;
}
public static FloatBuffer getGPUImageCompatTextureVerticesBuffer(final int directionFlag) {
float[] buffer;
switch (directionFlag & 0xF0) {
case RESCoreParameters.FLAG_DIRECTION_ROATATION_90:
buffer = TEXTURE_ROTATED_90.clone();
break;
case RESCoreParameters.FLAG_DIRECTION_ROATATION_180:
buffer = TEXTURE_ROTATED_180.clone();
break;
case RESCoreParameters.FLAG_DIRECTION_ROATATION_270:
buffer = TEXTURE_ROTATED_270.clone();
break;
default:
buffer = TEXTURE_NO_ROTATION.clone();
}
if ((directionFlag & RESCoreParameters.FLAG_DIRECTION_FLIP_HORIZONTAL) != 0) {
buffer[0] = flip(buffer[0]);
buffer[2] = flip(buffer[2]);
buffer[4] = flip(buffer[4]);
buffer[6] = flip(buffer[6]);
}
if ((directionFlag & RESCoreParameters.FLAG_DIRECTION_FLIP_VERTICAL) != 0) {
buffer[1] = flip(buffer[1]);
buffer[3] = flip(buffer[3]);
buffer[5] = flip(buffer[5]);
buffer[7] = flip(buffer[7]);
}
FloatBuffer result = ByteBuffer.allocateDirect(GLESTools.FLOAT_SIZE_BYTES * buffer.length).
order(ByteOrder.nativeOrder()).
asFloatBuffer();
result.put(buffer);
result.position(0);
return result;
}
private static float flip(final float i) {
return i == 0.0f ? 1.0f : 0.0f;
}
}
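// For illustration: wrapping the GPUImage beauty filter above so it can be installed as a hard
// video filter (hard filter mode assumed); the parameter values are that filter's defaults.
class GPUImageCompatibleFilterUsageSketch {
    static BaseHardVideoFilter createBeautyFilter() {
        me.lake.librestreaming.ws.filter.hardfilter.GPUImageBeautyFilter beauty =
                new me.lake.librestreaming.ws.filter.hardfilter.GPUImageBeautyFilter();
        beauty.setAllBeautyParams(0.8f, 0.3f, -0.5f); // smoothing, brightening, tone
        return new GPUImageCompatibleFilter<>(beauty); // install via setHardVideoFilter(...)
    }
}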