This commit is contained in:
2022-03-11 08:43:32 +08:00
commit 8f9ae202db
148 changed files with 23318 additions and 0 deletions

1
libWSLive/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/build

42
libWSLive/build.gradle Normal file
View File

@@ -0,0 +1,42 @@
// Gradle build script for the libWSLive Android library module.
apply plugin: 'com.android.library'
android {
    compileSdkVersion 31
    defaultConfig {
        targetSdkVersion 31
        ndk{
            // only package 32-bit ARM native libraries
            abiFilters "armeabi-v7a"
        }
    }
    lintOptions {
        // do not fail the build on lint findings
        abortOnError false
        checkReleaseBuilds false
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }
    // NDK build of the bundled rtmp sources is disabled below; presumably
    // prebuilt .so files are shipped instead — TODO confirm.
    /* externalNativeBuild{
        ndkBuild{
            path file("src/main/jni/rtmp/Android.mk")
        }
    }*/
    /* sourceSets {
        main {
            jniLibs.srcDirs 'src/main/jniLibs'
            jni.srcDirs = []
        }
    }*/
}
dependencies {
    // GPUImage filter library (presumably backing the GPU video filters — verify)
    implementation 'jp.co.cyberagent.android.gpuimage:gpuimage-library:1.4.1'
}

17
libWSLive/proguard-rules.pro vendored Normal file
View File

@@ -0,0 +1,17 @@
# Add project specific ProGuard rules here.
# By default, the flags in this file are appended to flags specified
# in /home/lake/Android/Sdk/tools/proguard/proguard-android.txt
# You can edit the include path and order by changing the proguardFiles
# directive in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# Add any project specific keep options here:
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}

View File

@@ -0,0 +1,17 @@
<!-- Manifest for the me.lake.librestreaming library module. -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="me.lake.librestreaming">
    <!-- Capture and network permissions required for live streaming -->
    <uses-permission android:name="android.permission.CAMERA"/>
    <uses-permission android:name="android.permission.INTERNET"/>
    <uses-permission android:name="android.permission.FLASHLIGHT" />
    <uses-permission android:name="android.permission.RECORD_AUDIO"/>
    <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS"/>
    <!-- Declared camera features; devices without a camera cannot use this library -->
    <uses-feature android:name="android.hardware.camera" />
    <uses-feature android:name="android.hardware.camera.autofocus" />
    <!-- largeHeap: presumably to accommodate large video frame buffers — verify -->
    <application android:label="@string/app_name"
        android:largeHeap="true">
    </application>
</manifest>

View File

@@ -0,0 +1,34 @@
package me.lake.librestreaming.client;
import android.os.Handler;
import android.os.Looper;
import java.util.concurrent.Executor;
/**
 * Delivers callbacks on the Android main (UI) thread.
 * Singleton; obtain via {@link #i()}.
 */
public class CallbackDelivery {
    private static CallbackDelivery instance;
    private final Executor mCallbackPoster;
    // Handler bound to the main looper; all posted work runs on the UI thread.
    private final Handler handler = new Handler(Looper.getMainLooper());

    /**
     * Returns the shared instance, creating it on first use.
     * BUGFIX: creation is now synchronized — the previous unsynchronized
     * check-then-assign could create two instances under contention.
     */
    public static synchronized CallbackDelivery i() {
        if (instance == null) {
            instance = new CallbackDelivery();
        }
        return instance;
    }

    private CallbackDelivery() {
        mCallbackPoster = new Executor() {
            @Override
            public void execute(Runnable command) {
                handler.post(command);
            }
        };
    }

    /** Runs the task asynchronously on the main thread. */
    public void post(Runnable runnable) {
        mCallbackPoster.execute(runnable);
    }

    /** Runs the task on the main thread after {@code time} milliseconds. */
    public void postDelayed(Runnable runnable, long time) {
        handler.postDelayed(runnable, time);
    }
}

View File

@@ -0,0 +1,6 @@
package me.lake.librestreaming.client;
/**
 * Library-wide constants.
 */
public class Constants {
    /** Library version string. Declared {@code final} so it cannot be reassigned at runtime. */
    public static final String VERSION = "0.1.0";
}

View File

@@ -0,0 +1,134 @@
package me.lake.librestreaming.client;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import me.lake.librestreaming.core.RESSoftAudioCore;
import me.lake.librestreaming.filter.softaudiofilter.BaseSoftAudioFilter;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.tools.LogTools;
/**
 * Captures microphone PCM with {@link AudioRecord} on a dedicated thread and
 * feeds the samples into a {@link RESSoftAudioCore} for filtering/encoding.
 */
public class RESAudioClient {
    RESCoreParameters resCoreParameters;
    // guards all prepare/start/stop/destroy transitions
    private final Object syncOp = new Object();
    private AudioRecordThread audioRecordThread;
    private AudioRecord audioRecord;
    // reusable PCM read buffer, sized to one "slice" (see prepare())
    private byte[] audioBuffer;
    private RESSoftAudioCore softAudioCore;

    public RESAudioClient(RESCoreParameters parameters) {
        resCoreParameters = parameters;
    }

    /**
     * Configures the software audio core and the AudioRecord source.
     *
     * @param resConfig streaming configuration
     * @return true when both the audio core and the recorder are ready
     */
    public boolean prepare(RESConfig resConfig) {
        synchronized (syncOp) {
            resCoreParameters.audioBufferQueueNum = 5;
            softAudioCore = new RESSoftAudioCore(resCoreParameters);
            if (!softAudioCore.prepare(resConfig)) {
                LogTools.e("RESAudioClient,prepare");
                return false;
            }
            resCoreParameters.audioRecoderFormat = AudioFormat.ENCODING_PCM_16BIT;
            resCoreParameters.audioRecoderChannelConfig = AudioFormat.CHANNEL_IN_MONO;
            // one slice = 1/10th of a second of mono samples
            resCoreParameters.audioRecoderSliceSize = resCoreParameters.mediacodecAACSampleRate / 10;
            // 16-bit PCM -> 2 bytes per sample
            resCoreParameters.audioRecoderBufferSize = resCoreParameters.audioRecoderSliceSize * 2;
            resCoreParameters.audioRecoderSource = MediaRecorder.AudioSource.DEFAULT;
            resCoreParameters.audioRecoderSampleRate = resCoreParameters.mediacodecAACSampleRate;
            // BUGFIX: the result of prepareAudio() was previously ignored, so
            // prepare() reported success even when AudioRecord failed to initialize.
            return prepareAudio();
        }
    }

    /**
     * Starts the audio core, the recorder and the capture thread.
     */
    public boolean start(RESFlvDataCollecter flvDataCollecter) {
        synchronized (syncOp) {
            softAudioCore.start(flvDataCollecter);
            audioRecord.startRecording();
            audioRecordThread = new AudioRecordThread();
            audioRecordThread.start();
            LogTools.d("RESAudioClient,start()");
            return true;
        }
    }

    /**
     * Stops the capture thread, the audio core and the recorder.
     * No-op (returns true) when start() was never called.
     */
    public boolean stop() {
        synchronized (syncOp) {
            if (audioRecordThread != null) {
                audioRecordThread.quit();
                try {
                    audioRecordThread.join();
                } catch (InterruptedException ignored) {
                    // BUGFIX: restore the interrupt status instead of swallowing it
                    Thread.currentThread().interrupt();
                }
                softAudioCore.stop();
                audioRecordThread = null;
                audioRecord.stop();
                return true;
            }
            return true;
        }
    }

    /**
     * Releases the AudioRecord. Safe to call even if prepare() failed.
     */
    public boolean destroy() {
        synchronized (syncOp) {
            // BUGFIX: guard against destroy() before a successful prepare()
            if (audioRecord != null) {
                audioRecord.release();
            }
            return true;
        }
    }

    /** Installs an audio filter; delegates to the soft audio core. */
    public void setSoftAudioFilter(BaseSoftAudioFilter baseSoftAudioFilter) {
        softAudioCore.setAudioFilter(baseSoftAudioFilter);
    }

    /** Locks and returns the current audio filter; pair with releaseSoftAudioFilter(). */
    public BaseSoftAudioFilter acquireSoftAudioFilter() {
        return softAudioCore.acquireAudioFilter();
    }

    /** Releases the filter lock taken by acquireSoftAudioFilter(). */
    public void releaseSoftAudioFilter() {
        softAudioCore.releaseAudioFilter();
    }

    /**
     * Creates the AudioRecord with a generously sized (5x minimum) buffer and
     * configures slice-based position notifications.
     *
     * @return false when the recorder could not be initialized or configured
     */
    private boolean prepareAudio() {
        int minBufferSize = AudioRecord.getMinBufferSize(resCoreParameters.audioRecoderSampleRate,
                resCoreParameters.audioRecoderChannelConfig,
                resCoreParameters.audioRecoderFormat);
        audioRecord = new AudioRecord(resCoreParameters.audioRecoderSource,
                resCoreParameters.audioRecoderSampleRate,
                resCoreParameters.audioRecoderChannelConfig,
                resCoreParameters.audioRecoderFormat,
                minBufferSize * 5);
        audioBuffer = new byte[resCoreParameters.audioRecoderBufferSize];
        if (AudioRecord.STATE_INITIALIZED != audioRecord.getState()) {
            LogTools.e("audioRecord.getState()!=AudioRecord.STATE_INITIALIZED!");
            return false;
        }
        if (AudioRecord.SUCCESS != audioRecord.setPositionNotificationPeriod(resCoreParameters.audioRecoderSliceSize)) {
            LogTools.e("AudioRecord.SUCCESS != audioRecord.setPositionNotificationPeriod(" + resCoreParameters.audioRecoderSliceSize + ")");
            return false;
        }
        return true;
    }

    /** Pulls PCM from the AudioRecord and pushes it into the soft audio core. */
    class AudioRecordThread extends Thread {
        // BUGFIX: volatile — quit() is called from another thread, so the read
        // in run() needs visibility guarantees.
        private volatile boolean isRunning = true;

        AudioRecordThread() {
            isRunning = true;
        }

        /** Asks the capture loop to exit after the current read. */
        public void quit() {
            isRunning = false;
        }

        @Override
        public void run() {
            LogTools.d("AudioRecordThread,tid=" + Thread.currentThread().getId());
            while (isRunning) {
                int size = audioRecord.read(audioBuffer, 0, audioBuffer.length);
                // size <= 0 is an AudioRecord error code; skip those reads
                if (isRunning && softAudioCore != null && size > 0) {
                    softAudioCore.queueAudio(audioBuffer);
                }
            }
        }
    }
}

View File

@@ -0,0 +1,541 @@
package me.lake.librestreaming.client;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.os.Build;
import android.widget.Toast;
import java.lang.ref.WeakReference;
import me.lake.librestreaming.core.listener.RESConnectionListener;
import me.lake.librestreaming.core.listener.RESScreenShotListener;
import me.lake.librestreaming.core.listener.RESVideoChangeListener;
import me.lake.librestreaming.encoder.MediaVideoEncoder;
import me.lake.librestreaming.filter.hardvideofilter.BaseHardVideoFilter;
import me.lake.librestreaming.filter.softaudiofilter.BaseSoftAudioFilter;
import me.lake.librestreaming.filter.softvideofilter.BaseSoftVideoFilter;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.Size;
import me.lake.librestreaming.rtmp.RESFlvData;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.rtmp.RESRtmpSender;
import me.lake.librestreaming.tools.LogTools;
/**
 * Facade for the streaming library: owns the video client, audio client and
 * RTMP sender and coordinates prepare/start/stop/destroy across them.
 */
public class RESClient {
    public RESVideoClient videoClient;
    private RESAudioClient audioClient;
    // single lock serializing all lifecycle operations
    private final Object SyncOp;
    //parameters
    RESCoreParameters coreParameters;
    private RESRtmpSender rtmpSender;
    private RESFlvDataCollecter dataCollecter;
    // whether we are currently pushing the stream
    public boolean isStreaming = false;
    // weak reference so this client never keeps a finished Activity alive
    private WeakReference<Activity> mActivity;

    public RESClient() {
        SyncOp = new Object();
        coreParameters = new RESCoreParameters();
        // eagerly initialize the main-thread callback dispatcher
        CallbackDelivery.i();
    }

    /**
     * Optionally supply an Activity context (used for camera display
     * orientation and for showing a Toast when streaming fails to start).
     * Non-Activity contexts are ignored.
     */
    public void setContext(Context context){
        if(context instanceof Activity){
            this.mActivity = new WeakReference<Activity>((Activity) context);
        }
    }

    /**
     * prepare to stream
     *
     * @param resConfig config
     * @return true if prepare success
     */
    public boolean prepare(RESConfig resConfig) {
        synchronized (SyncOp) {
            checkDirection(resConfig);
            coreParameters.filterMode = resConfig.getFilterMode();
            coreParameters.rtmpAddr = resConfig.getRtmpAddr();
            coreParameters.printDetailMsg = resConfig.isPrintDetailMsg();
            coreParameters.senderQueueLength = 200;//150
            videoClient = new RESVideoClient(coreParameters);
            // BUGFIX: mActivity is only set via setContext(); guard against NPE
            // when the caller never provided an Activity.
            videoClient.setActivity(mActivity == null ? null : mActivity.get());
            audioClient = new RESAudioClient(coreParameters);
            if (!videoClient.prepare(resConfig)) {
                LogTools.d("!!!!!videoClient.prepare()failed");
                LogTools.d(coreParameters.toString());
                return false;
            }
            if (!audioClient.prepare(resConfig)) {
                LogTools.d("!!!!!audioClient.prepare()failed");
                LogTools.d(coreParameters.toString());
                return false;
            }
            rtmpSender = new RESRtmpSender();
            rtmpSender.prepare(coreParameters);
            // routes every produced FLV packet into the RTMP sender
            dataCollecter = new RESFlvDataCollecter() {
                @Override
                public void collect(RESFlvData flvData, int type) {
                    if(rtmpSender != null){
                        rtmpSender.feed(flvData, type);
                    }
                }
            };
            coreParameters.done = true;
            LogTools.d("===INFO===coreParametersReady:");
            LogTools.d(coreParameters.toString());
            return true;
        }
    }

    /**
     * start streaming to the given address; null falls back to the configured one
     */
    public void startStreaming(String rtmpAddr) {
        isStreaming = true;
        synchronized (SyncOp) {
            try {
                videoClient.startStreaming(dataCollecter);
                rtmpSender.start(rtmpAddr == null ? coreParameters.rtmpAddr : rtmpAddr);
                audioClient.start(dataCollecter);
                LogTools.d("RESClient,startStreaming()");
            }catch (Exception e){
                // likely a missing runtime permission; tell the user and bail out.
                // BUGFIX: mActivity itself may be null if setContext() was never called.
                Activity activity = mActivity == null ? null : mActivity.get();
                if(activity != null){
                    Toast.makeText(activity,"可能没有权限",Toast.LENGTH_LONG).show();
                    activity.finish();
                }
            }
        }
    }

    /**
     * start streaming
     */
    public void startStreaming() {
        isStreaming = true;
        synchronized (SyncOp) {
            videoClient.startStreaming(dataCollecter);
            rtmpSender.start(coreParameters.rtmpAddr);
            audioClient.start(dataCollecter);
            LogTools.d("RESClient,startStreaming()");
        }
    }

    /**
     * stop streaming
     */
    public void stopStreaming() {
        isStreaming = false;
        synchronized (SyncOp) {
            videoClient.stopStreaming();
            audioClient.stop();
            rtmpSender.stop();
            LogTools.d("RESClient,stopStreaming()");
        }
    }

    /**
     * clean up
     */
    public void destroy() {
        synchronized (SyncOp) {
            rtmpSender.destroy();
            videoClient.destroy();
            audioClient.destroy();
            rtmpSender = null;
            videoClient = null;
            audioClient = null;
            LogTools.d("RESClient,destroy()");
        }
    }

    /**
     * call it AFTER {@link #prepare(RESConfig)}
     *
     * @param surfaceTexture to rendering preview
     */
    public void startPreview(SurfaceTexture surfaceTexture, int visualWidth, int visualHeight) {
        if(videoClient != null){
            videoClient.startPreview(surfaceTexture, visualWidth, visualHeight);
        }
        LogTools.d("RESClient,startPreview()");
    }

    /** Notify the preview renderer that the visible surface size changed. */
    public void updatePreview(int visualWidth, int visualHeight) {
        if(videoClient != null){
            videoClient.updatePreview(visualWidth, visualHeight);
        }
        LogTools.d("RESClient,updatePreview()");
    }

    /** Exposes the underlying Camera object (may be null before prepare()). */
    public Camera getCamera(){
        return videoClient.getCamera();
    }

    /** Index of the camera currently in use. */
    public int getCameraId(){
        return videoClient.currentCameraIndex;
    }

    /**
     *
     * @param releaseTexture true if you won`t reuse this surfaceTexture later
     */
    public void stopPreview(boolean releaseTexture) {
        if(videoClient != null){
            videoClient.stopPreview(releaseTexture);
        }
        LogTools.d("RESClient,stopPreview()");
    }

    /**
     * change camera on running.<br/>
     */
    public boolean swapCamera() {
        synchronized (SyncOp) {
            LogTools.d("RESClient,swapCamera()");
            return videoClient.swapCamera();
        }
    }

    /**
     * only for soft filter mode.<br/>
     * use it to update filter property.<br/>
     * call it with {@link #releaseSoftVideoFilter()}<br/>
     * make sure to release it in 3ms
     *
     * @return the videofilter in use
     */
    public BaseSoftVideoFilter acquireSoftVideoFilter() {
        return videoClient.acquireSoftVideoFilter();
    }

    /**
     * only for soft filter mode.<br/>
     * call it with {@link #acquireSoftVideoFilter()}
     */
    public void releaseSoftVideoFilter() {
        videoClient.releaseSoftVideoFilter();
    }

    /**
     * get the real video size,call after prepare()
     *
     * @return the streamed video size
     */
    public Size getVideoSize() {
        return new Size(coreParameters.videoWidth, coreParameters.videoHeight);
    }

    /**
     * get the rtmp server ip addr ,call after connect success.
     *
     * @return server ip, or null before the sender exists
     */
    public String getServerIpAddr() {
        synchronized (SyncOp) {
            return rtmpSender == null ? null : rtmpSender.getServerIpAddr();
        }
    }

    /**
     * get the real draw frame rate of screen
     *
     * @return frames per second, 0 before prepare()
     */
    public float getDrawFrameRate() {
        synchronized (SyncOp) {
            return videoClient == null ? 0 : videoClient.getDrawFrameRate();
        }
    }

    /**
     * get the rate of video frame sent by rtmp
     *
     * @return frames per second, 0 before prepare()
     */
    public float getSendFrameRate() {
        synchronized (SyncOp) {
            return rtmpSender == null ? 0 : rtmpSender.getSendFrameRate();
        }
    }

    /**
     * get free percent of send buffer
     * return ~0.0 if the netspeed is not enough or net is blocked.
     * @return free fraction in [0,1], 0 before prepare()
     */
    public float getSendBufferFreePercent() {
        synchronized (SyncOp) {
            return rtmpSender == null ? 0 : rtmpSender.getSendBufferFreePercent();
        }
    }

    /**
     * only for soft filter mode.<br/>
     * set videofilter.<br/>
     * can be called Repeatedly.<br/>
     * do NOT call it between {@link #acquireSoftVideoFilter()} & {@link #releaseSoftVideoFilter()}
     *
     * @param baseSoftVideoFilter videofilter to apply
     */
    public void setSoftVideoFilter(BaseSoftVideoFilter baseSoftVideoFilter) {
        videoClient.setSoftVideoFilter(baseSoftVideoFilter);
    }

    /**
     * only for hard filter mode.<br/>
     * use it to update filter property.<br/>
     * call it with {@link #releaseHardVideoFilter()}<br/>
     * make sure to release it in 3ms
     *
     * @return the videofilter in use
     */
    public BaseHardVideoFilter acquireHardVideoFilter() {
        return videoClient.acquireHardVideoFilter();
    }

    /**
     * only for hard filter mode.<br/>
     * call it with {@link #acquireHardVideoFilter()}
     */
    public void releaseHardVideoFilter() {
        videoClient.releaseHardVideoFilter();
    }

    /**
     * only for hard filter mode.<br/>
     * set videofilter.<br/>
     * can be called Repeatedly.<br/>
     * do NOT call it between {@link #acquireHardVideoFilter()} & {@link #releaseHardVideoFilter()}
     *
     * @param baseHardVideoFilter videofilter to apply
     */
    public void setHardVideoFilter(BaseHardVideoFilter baseHardVideoFilter) {
        videoClient.setHardVideoFilter(baseHardVideoFilter);
    }

    /**
     * set audiofilter.<br/>
     * can be called Repeatedly.<br/>
     * do NOT call it between {@link #acquireSoftAudioFilter()} & {@link #releaseSoftAudioFilter()}
     *
     * @param baseSoftAudioFilter audiofilter to apply
     */
    public void setSoftAudioFilter(BaseSoftAudioFilter baseSoftAudioFilter) {
        audioClient.setSoftAudioFilter(baseSoftAudioFilter);
    }

    /**
     * use it to update filter property.<br/>
     * call it with {@link #releaseSoftAudioFilter()}<br/>
     * make sure to release it in 3ms
     *
     * @return the audiofilter in use
     */
    public BaseSoftAudioFilter acquireSoftAudioFilter() {
        return audioClient.acquireSoftAudioFilter();
    }

    /**
     * call it with {@link #acquireSoftAudioFilter()}
     */
    public void releaseSoftAudioFilter() {
        audioClient.releaseSoftAudioFilter();
    }

    /**
     * get video & audio real send Speed
     *
     * @return speed in B/s
     */
    public int getAVSpeed() {
        synchronized (SyncOp) {
            return rtmpSender == null ? 0 : rtmpSender.getTotalSpeed();
        }
    }

    /**
     * call it AFTER {@link #prepare(RESConfig)}
     *
     * @param connectionListener receives connect/close results
     */
    public void setConnectionListener(RESConnectionListener connectionListener) {
        if(rtmpSender != null) {
            rtmpSender.setConnectionListener(connectionListener);
        }
    }

    /**
     * listener for video size change
     * @param videoChangeListener listener to notify
     */
    public void setVideoChangeListener(RESVideoChangeListener videoChangeListener) {
        if(videoClient != null){
            videoClient.setVideoChangeListener(videoChangeListener);
        }
    }

    /**
     * get the param of video,audio,mediacodec
     *
     * @return info
     */
    public String getConfigInfo() {
        return coreParameters.toString();
    }

    /**
     * set zoom by percent [0.0f,1.0f]
     *
     * @param targetPercent zoompercent
     */
    public boolean setZoomByPercent(float targetPercent) {
        return videoClient.setZoomByPercent(targetPercent);
    }

    /**
     * toggle flash light
     *
     * @return true if operation success
     */
    public boolean toggleFlashLight() {
        return videoClient.toggleFlashLight();
    }

    /** Asynchronously captures the current frame; result via listener. */
    public void takeScreenShot(RESScreenShotListener listener) {
        videoClient.takeScreenShot(listener);
    }

    /**
     * Change video bitrate on the fly<br/>
     * call between {@link #startStreaming()} & {@link #stopStreaming()}
     * @param bitrate target bitrate bits/sec
     */
    @TargetApi(Build.VERSION_CODES.KITKAT)
    public void reSetVideoBitrate(int bitrate) {
        videoClient.reSetVideoBitrate(bitrate);
    }

    /**
     * get current bitrate
     * @return current bitrate bits/sec
     */
    public int getVideoBitrate() {
        return videoClient.getVideoBitrate();
    }

    /**
     * update fps on the fly.
     * @param fps target frames per second
     */
    public void reSetVideoFPS(int fps) {
        videoClient.reSetVideoFPS(fps);
    }

    /**
     * only work with hard mode.
     * reset video size on the fly.
     * may restart camera.
     * will restart mediacodec.
     * will not interrupt streaming
     * @param targetVideoSize new output size; null is ignored
     */
    public void reSetVideoSize(Size targetVideoSize) {
        if (targetVideoSize == null) {
            return;
        }
        if (coreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
            throw new IllegalArgumentException("soft mode doesn`t support reSetVideoSize");
        }else {
            videoClient.reSetVideoSize(targetVideoSize);
        }
    }

    /** Direct access to the RTMP sender (null before prepare()/after destroy()). */
    public RESRtmpSender getRtmpSender(){
        return rtmpSender;
    }

    /** Library version. NOTE(review): method name keeps its historical typo for compatibility. */
    public String getVertion() {
        return Constants.VERSION;
    }

    /**
     * =====================PRIVATE=================
     **/
    /**
     * Validates the front/back camera direction flag bits from the config and
     * derives portrait/landscape mode. Exactly one rotation bit (bits 4-8) must
     * be set per camera, and both cameras must agree on orientation.
     *
     * @throws RuntimeException when the flags are inconsistent
     */
    private void checkDirection(RESConfig resConfig) {
        int frontFlag = resConfig.getFrontCameraDirectionMode();
        int backFlag = resConfig.getBackCameraDirectionMode();
        int fbit = 0;
        int bbit = 0;
        // default to 0-degree rotation when no rotation bit is set
        if ((frontFlag >> 4) == 0) {
            frontFlag |= RESCoreParameters.FLAG_DIRECTION_ROATATION_0;
        }
        if ((backFlag >> 4) == 0) {
            backFlag |= RESCoreParameters.FLAG_DIRECTION_ROATATION_0;
        }
        // count rotation bits; exactly one must be set per camera
        for (int i = 4; i <= 8; ++i) {
            if (((frontFlag >> i) & 0x1) == 1) {
                fbit++;
            }
            if (((backFlag >> i) & 0x1) == 1) {
                bbit++;
            }
        }
        if (fbit != 1 || bbit != 1) {
            throw new RuntimeException("invalid direction rotation flag:frontFlagNum=" + fbit + ",backFlagNum=" + bbit);
        }
        // 0/180 degrees => landscape (0), 90/270 => portrait (1)
        if (((frontFlag & RESCoreParameters.FLAG_DIRECTION_ROATATION_0) != 0) || ((frontFlag & RESCoreParameters.FLAG_DIRECTION_ROATATION_180) != 0)) {
            fbit = 0;
        } else {
            fbit = 1;
        }
        if (((backFlag & RESCoreParameters.FLAG_DIRECTION_ROATATION_0) != 0) || ((backFlag & RESCoreParameters.FLAG_DIRECTION_ROATATION_180) != 0)) {
            bbit = 0;
        } else {
            bbit = 1;
        }
        if (bbit != fbit) {
            if (bbit == 0) {
                throw new RuntimeException("invalid direction rotation flag:back camera is landscape but front camera is portrait");
            } else {
                throw new RuntimeException("invalid direction rotation flag:back camera is portrait but front camera is landscape");
            }
        }
        if (fbit == 1) {
            coreParameters.isPortrait = true;
        } else {
            coreParameters.isPortrait = false;
        }
        coreParameters.backCameraDirectionMode = backFlag;
        coreParameters.frontCameraDirectionMode = frontFlag;
        // BUGFIX: debug output previously went to System.out; use the library logger
        LogTools.d("coreParameters.backCameraDirectionMode = " + coreParameters.backCameraDirectionMode);
    }

    /** Attach an external video encoder (e.g. for local recording). */
    public void setVideoEncoder(final MediaVideoEncoder encoder) {
        videoClient.setVideoEncoder(encoder);
    }

    /** Configure mirroring for preview and/or stream output. */
    public void setMirror(boolean isEnableMirror,boolean isEnablePreviewMirror,boolean isEnableStreamMirror) {
        videoClient.setMirror(isEnableMirror,isEnablePreviewMirror,isEnableStreamMirror);
    }

    /** Request the GL layer to rebuild its EGL context on next use. */
    public void setNeedResetEglContext(boolean bol){
        videoClient.setNeedResetEglContext(bol);
    }

    /** Triggers a camera focus/metering area update on the video client. */
    public void setCreamAr(){
        videoClient.setCameraArea();
    }

    static {
        // native rtmp/stream implementation
        System.loadLibrary("restreaming");
    }
}

View File

@@ -0,0 +1,525 @@
package me.lake.librestreaming.client;
import android.app.Activity;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import me.lake.librestreaming.core.CameraHelper;
import me.lake.librestreaming.core.RESHardVideoCore;
import me.lake.librestreaming.core.RESSoftVideoCore;
import me.lake.librestreaming.core.RESVideoCore;
import me.lake.librestreaming.core.listener.RESScreenShotListener;
import me.lake.librestreaming.core.listener.RESVideoChangeListener;
import me.lake.librestreaming.encoder.MediaVideoEncoder;
import me.lake.librestreaming.filter.hardvideofilter.BaseHardVideoFilter;
import me.lake.librestreaming.filter.softvideofilter.BaseSoftVideoFilter;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.Size;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.tools.BuffSizeCalculator;
import me.lake.librestreaming.tools.CameraUtil;
import me.lake.librestreaming.tools.LogTools;
public class RESVideoClient {
RESCoreParameters resCoreParameters;
private final Object syncOp = new Object();
private Camera camera;
public SurfaceTexture camTexture;
private int cameraNum;
public int currentCameraIndex;
private RESVideoCore videoCore;
private boolean isStreaming;
private boolean isPreviewing;
/** Creates an idle video client bound to the shared core parameters. */
public RESVideoClient(RESCoreParameters parameters) {
    isStreaming = false;
    isPreviewing = false;
    resCoreParameters = parameters;
    // default to the back camera until prepare() possibly overrides it
    currentCameraIndex = Camera.CameraInfo.CAMERA_FACING_BACK;
    cameraNum = Camera.getNumberOfCameras();
}
/**
 * Opens the camera and configures preview size, fps, color format and the
 * video core (soft or hard filter pipeline) from the supplied config.
 *
 * @param resConfig streaming configuration (camera id, target sizes, fps, ...)
 * @return true on success; false if the camera cannot be opened or configured
 */
public boolean prepare(RESConfig resConfig) {
    synchronized (syncOp) {
        // use the requested camera id only if it actually exists
        if ((cameraNum - 1) >= resConfig.getDefaultCamera()) {
            currentCameraIndex = resConfig.getDefaultCamera();
        }
        if (null == (camera = createCamera(currentCameraIndex))) {
            LogTools.e("can not open camera");
            return false;
        }
        Camera.Parameters parameters = camera.getParameters();
        CameraHelper.selectCameraPreviewWH(parameters, resCoreParameters, resConfig.getTargetPreviewSize());
        CameraHelper.selectCameraFpsRange(parameters, resCoreParameters);
        // clamp requested fps to the camera's preview maximum
        // (previewMaxFps appears to be in milli-fps, hence /1000)
        if (resConfig.getVideoFPS() > resCoreParameters.previewMaxFps / 1000) {
            resCoreParameters.videoFPS = resCoreParameters.previewMaxFps / 1000;
        } else {
            resCoreParameters.videoFPS = resConfig.getVideoFPS();
        }
        resoveResolution(resCoreParameters, resConfig.getTargetVideoSize());
        if (!CameraHelper.selectCameraColorFormat(parameters, resCoreParameters)) {
            LogTools.e("CameraHelper.selectCameraColorFormat,Failed");
            resCoreParameters.dump();
            return false;
        }
        if (!CameraHelper.configCamera(camera, resCoreParameters)) {
            LogTools.e("CameraHelper.configCamera,Failed");
            resCoreParameters.dump();
            return false;
        }
        // choose the processing pipeline matching the configured filter mode
        switch (resCoreParameters.filterMode) {
            case RESCoreParameters.FILTER_MODE_SOFT:
                videoCore = new RESSoftVideoCore(resCoreParameters);
                break;
            case RESCoreParameters.FILTER_MODE_HARD:
                videoCore = new RESHardVideoCore(resCoreParameters);
                break;
        }
        if (!videoCore.prepare(resConfig)) {
            return false;
        }
        videoCore.setCurrentCamera(currentCameraIndex);
        prepareVideo();
        return true;
    }
}
/** Exposes the underlying Camera (null before prepare() / after destroy()). */
public Camera getCamera(){
    return camera;
}
/**
 * Opens the camera with the given id and applies display orientation.
 *
 * @return the opened Camera, or null on any failure (already released)
 */
private Camera createCamera(int cameraId) {
    try {
        camera = Camera.open(cameraId);
        // activity may be null if setActivity() was never called; skip the
        // orientation adjustment rather than risking an NPE in the helper
        if (activity != null) {
            CameraUtil.setCameraDisplayOrientation(activity, cameraId, camera);
        }
    } catch (SecurityException e) {
        LogTools.trace("no permission", e);
        releaseCameraQuietly();
        return null;
    } catch (Exception e) {
        LogTools.trace("camera.open()failed", e);
        // BUGFIX: if open() succeeded but a later step threw, the camera was
        // previously leaked; release it before reporting failure
        releaseCameraQuietly();
        return null;
    }
    return camera;
}

/** Releases and clears the camera field, swallowing secondary failures. */
private void releaseCameraQuietly() {
    if (camera != null) {
        try {
            camera.release();
        } catch (Exception ignored) {
            // best-effort cleanup on an already-failing path
        }
        camera = null;
    }
}
/**
 * Soft filter mode pulls raw frames through camera callback buffers, so hand
 * the camera two reusable buffers up front; hard mode needs none.
 */
private boolean prepareVideo() {
    if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
        for (int i = 0; i < 2; i++) {
            camera.addCallbackBuffer(new byte[resCoreParameters.previewBufferSize]);
        }
    }
    return true;
}
/**
 * Creates the camera SurfaceTexture, wires the frame-delivery path matching
 * the filter mode, and starts the camera preview.
 *
 * @return false when the preview texture could not be attached
 */
private boolean startVideo() {
    // texture the camera renders into; OVERWATCH_TEXTURE_ID is the GL texture
    // name shared with the video core
    camTexture = new SurfaceTexture(RESVideoCore.OVERWATCH_TEXTURE_ID);
    if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
        // soft mode: raw frames arrive via the callback-buffer mechanism
        camera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
            @Override
            public void onPreviewFrame(byte[] data, Camera camera) {
                synchronized (syncOp) {
                    if (videoCore != null && data != null) {
                        ((RESSoftVideoCore) videoCore).queueVideo(data);
                    }
                    // always return the buffer so the camera can reuse it
                    camera.addCallbackBuffer(data);
                }
            }
        });
    } else {
        // hard mode: frames land on the SurfaceTexture; just notify the core
        camTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
            @Override
            public void onFrameAvailable(SurfaceTexture surfaceTexture) {
                synchronized (syncOp) {
                    if (videoCore != null) {
                        ((RESHardVideoCore) videoCore).onFrameAvailable();
                    }
                }
            }
        });
    }
    try {
        camera.setPreviewTexture(camTexture);
    } catch (IOException e) {
        LogTools.trace(e);
        camera.release();
        return false;
    }
    camera.startPreview();
    return true;
}
/**
 * Starts rendering the camera preview into the given surface. The camera
 * capture itself is only (re)started when neither preview nor stream is
 * already running.
 */
public boolean startPreview(SurfaceTexture surfaceTexture, int visualWidth, int visualHeight) {
    synchronized (syncOp) {
        boolean captureIdle = !isStreaming && !isPreviewing;
        if (captureIdle) {
            if (!startVideo()) {
                resCoreParameters.dump();
                LogTools.e("RESVideoClient,start(),failed");
                return false;
            }
            videoCore.updateCamTexture(camTexture);
        }
        videoCore.startPreview(surfaceTexture, visualWidth, visualHeight);
        isPreviewing = true;
        return true;
    }
}
/** Notify the renderer that the visible preview surface size changed. */
public void updatePreview(int visualWidth, int visualHeight) {
    synchronized (syncOp) {
        // consistency fix: every other videoCore delegate locks syncOp and
        // guards against use after destroy(); this one previously did neither
        if (videoCore != null) {
            videoCore.updatePreview(visualWidth, visualHeight);
        }
    }
}
/**
 * Stops rendering the preview. The camera capture itself is only stopped when
 * no stream is running, since streaming shares the same capture session.
 *
 * @param releaseTexture true to also release the camera SurfaceTexture
 *                       (pass false if it will be reused)
 */
public boolean stopPreview(boolean releaseTexture) {
    synchronized (syncOp) {
        if (isPreviewing) {
            videoCore.stopPreview(releaseTexture);
            if (!isStreaming) {
                camera.stopPreview();
                videoCore.updateCamTexture(null);
                camTexture.release();
            }
        }
        isPreviewing = false;
        return true;
    }
}
/**
 * Starts feeding encoded video into the collecter. Camera capture is only
 * (re)started when neither preview nor stream is already running.
 */
public boolean startStreaming(RESFlvDataCollecter flvDataCollecter) {
    synchronized (syncOp) {
        boolean captureIdle = !isStreaming && !isPreviewing;
        if (captureIdle) {
            if (!startVideo()) {
                resCoreParameters.dump();
                LogTools.e("RESVideoClient,start(),failed");
                return false;
            }
            videoCore.updateCamTexture(camTexture);
        }
        videoCore.startStreaming(flvDataCollecter);
        isStreaming = true;
        return true;
    }
}
/**
 * Stops the encoded stream. The camera itself keeps running while a preview
 * is still active; otherwise capture is torn down too.
 */
public boolean stopStreaming() {
    synchronized (syncOp) {
        if (isStreaming) {
            videoCore.stopStreaming();
            boolean cameraStillNeeded = isPreviewing;
            if (!cameraStillNeeded) {
                camera.stopPreview();
                videoCore.updateCamTexture(null);
                camTexture.release();
            }
        }
        isStreaming = false;
        return true;
    }
}
/**
 * Releases the camera and the video core.
 * BUGFIX: now idempotent — a second destroy() (or a destroy() before a
 * successful prepare()) no longer throws NPE.
 */
public boolean destroy() {
    synchronized (syncOp) {
        if (camera != null) {
            camera.release();
            camera = null;
        }
        if (videoCore != null) {
            videoCore.destroy();
            videoCore = null;
        }
        return true;
    }
}
/**
 * Switches to the next camera while keeping the pipeline alive: tears down the
 * current camera, opens the next index (wrapping around), re-configures it and
 * re-attaches the preview texture to the video core.
 *
 * @return false when the next camera cannot be opened or configured
 */
public boolean swapCamera() {
    synchronized (syncOp) {
        LogTools.d("RESClient,swapCamera()");
        camera.stopPreview();
        camera.release();
        camera = null;
        // advance to the next camera id, modulo the number of cameras
        if (null == (camera = createCamera(currentCameraIndex = (++currentCameraIndex) % cameraNum))) {
            LogTools.e("can not swap camera");
            return false;
        }
        videoCore.setCurrentCamera(currentCameraIndex);
        CameraHelper.selectCameraFpsRange(camera.getParameters(), resCoreParameters);
        if (!CameraHelper.configCamera(camera, resCoreParameters)) {
            camera.release();
            return false;
        }
        prepareVideo();
        // detach the old texture before restarting capture on the new camera
        camTexture.release();
        videoCore.updateCamTexture(null);
        startVideo();
        videoCore.updateCamTexture(camTexture);
        return true;
    }
}
/**
 * Toggles the torch: turns it on if currently off and vice versa, provided the
 * camera supports the target flash mode.
 *
 * @return true when the flash mode was changed, false otherwise
 */
public boolean toggleFlashLight() {
    synchronized (syncOp) {
        try {
            Camera.Parameters parameters = camera.getParameters();
            List<String> supportedModes = parameters.getSupportedFlashModes();
            boolean torchOn = Camera.Parameters.FLASH_MODE_TORCH.equals(parameters.getFlashMode());
            String targetMode = torchOn
                    ? Camera.Parameters.FLASH_MODE_OFF
                    : Camera.Parameters.FLASH_MODE_TORCH;
            if (supportedModes.contains(targetMode)) {
                parameters.setFlashMode(targetMode);
                camera.setParameters(parameters);
                return true;
            }
        } catch (Exception e) {
            LogTools.d("toggleFlashLight,failed" + e.getMessage());
            return false;
        }
        return false;
    }
}
/**
 * Sets camera zoom as a fraction of the maximum zoom level.
 *
 * @param targetPercent desired zoom in [0,1]; out-of-range values are clamped
 */
public boolean setZoomByPercent(float targetPercent) {
    synchronized (syncOp) {
        float clamped = targetPercent;
        if (clamped < 0f) {
            clamped = 0f;
        } else if (clamped > 1f) {
            clamped = 1f;
        }
        Camera.Parameters params = camera.getParameters();
        params.setZoom((int) (params.getMaxZoom() * clamped));
        camera.setParameters(params);
        return true;
    }
}
/** Changes the encoder bitrate on the fly; no-op before prepare(). */
public void reSetVideoBitrate(int bitrate) {
    synchronized (syncOp) {
        if (videoCore == null) {
            return;
        }
        videoCore.reSetVideoBitrate(bitrate);
    }
}
/** Current encoder bitrate in bits/sec; 0 before prepare(). */
public int getVideoBitrate() {
    synchronized (syncOp) {
        return videoCore == null ? 0 : videoCore.getVideoBitrate();
    }
}
/**
 * Changes the target frame rate on the fly, capped by the camera's preview
 * maximum (previewMaxFps appears to be stored in milli-fps).
 */
public void reSetVideoFPS(int fps) {
    synchronized (syncOp) {
        int maxFps = resCoreParameters.previewMaxFps / 1000;
        int targetFps = Math.min(fps, maxFps);
        if (videoCore != null) {
            videoCore.reSetVideoFPS(targetFps);
        }
    }
}
/**
 * Applies a new target video size on the fly. If the chosen preview dimensions
 * differ from the current ones the camera is restarted; the video core is
 * always informed of the new parameters.
 *
 * @return false when a required camera restart fails
 */
public boolean reSetVideoSize(Size targetVideoSize) {
    synchronized (syncOp) {
        RESCoreParameters newParameters = new RESCoreParameters();
        newParameters.isPortrait = resCoreParameters.isPortrait;
        newParameters.filterMode = resCoreParameters.filterMode;
        Camera.Parameters parameters = camera.getParameters();
        CameraHelper.selectCameraPreviewWH(parameters, newParameters, targetVideoSize);
        resoveResolution(newParameters, targetVideoSize);
        boolean needRestartCamera = (newParameters.previewVideoHeight != resCoreParameters.previewVideoHeight
                || newParameters.previewVideoWidth != resCoreParameters.previewVideoWidth);
        if (needRestartCamera) {
            // NOTE(review): the buffer size below is computed from the OLD
            // resCoreParameters dimensions before they are overwritten — this
            // looks like it should use newParameters; verify the intent.
            newParameters.previewBufferSize = BuffSizeCalculator.calculator(resCoreParameters.previewVideoWidth,
                    resCoreParameters.previewVideoHeight, resCoreParameters.previewColorFormat);
            resCoreParameters.previewVideoWidth = newParameters.previewVideoWidth;
            resCoreParameters.previewVideoHeight = newParameters.previewVideoHeight;
            resCoreParameters.previewBufferSize = newParameters.previewBufferSize;
            if ((isPreviewing || isStreaming)) {
                LogTools.d("RESClient,reSetVideoSize.restartCamera");
                camera.stopPreview();
                camera.release();
                camera = null;
                if (null == (camera = createCamera(currentCameraIndex))) {
                    LogTools.e("can not createCamera camera");
                    return false;
                }
                if (!CameraHelper.configCamera(camera, resCoreParameters)) {
                    camera.release();
                    return false;
                }
                prepareVideo();
                // rebuild the preview texture for the fresh camera session
                videoCore.updateCamTexture(null);
                camTexture.release();
                startVideo();
                videoCore.updateCamTexture(camTexture);
            }
        }
        videoCore.reSetVideoSize(newParameters);
        return true;
    }
}
/** Locks and returns the soft video filter; null when not in soft filter mode. */
public BaseSoftVideoFilter acquireSoftVideoFilter() {
    if (resCoreParameters.filterMode != RESCoreParameters.FILTER_MODE_SOFT) {
        return null;
    }
    return ((RESSoftVideoCore) videoCore).acquireVideoFilter();
}
/** Releases the lock taken by acquireSoftVideoFilter(); no-op in hard mode. */
public void releaseSoftVideoFilter() {
    if (resCoreParameters.filterMode != RESCoreParameters.FILTER_MODE_SOFT) {
        return;
    }
    ((RESSoftVideoCore) videoCore).releaseVideoFilter();
}
/** Installs a soft video filter; ignored when not in soft filter mode. */
public void setSoftVideoFilter(BaseSoftVideoFilter baseSoftVideoFilter) {
    if (resCoreParameters.filterMode != RESCoreParameters.FILTER_MODE_SOFT) {
        return;
    }
    ((RESSoftVideoCore) videoCore).setVideoFilter(baseSoftVideoFilter);
}
/** Locks and returns the hard video filter; null when not in hard filter mode. */
public BaseHardVideoFilter acquireHardVideoFilter() {
    if (resCoreParameters.filterMode != RESCoreParameters.FILTER_MODE_HARD) {
        return null;
    }
    return ((RESHardVideoCore) videoCore).acquireVideoFilter();
}
/** Releases the lock taken by acquireHardVideoFilter(); no-op in soft mode. */
public void releaseHardVideoFilter() {
    if (resCoreParameters.filterMode != RESCoreParameters.FILTER_MODE_HARD) {
        return;
    }
    ((RESHardVideoCore) videoCore).releaseVideoFilter();
}
/** Installs a hard video filter; ignored when not in hard filter mode. */
public void setHardVideoFilter(BaseHardVideoFilter baseHardVideoFilter) {
    if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_HARD) {
        // BUGFIX: removed stray System.err debug print ("videoCore is null ?")
        ((RESHardVideoCore) videoCore).setVideoFilter(baseHardVideoFilter);
    }
}
/** Captures the current frame asynchronously; ignored before prepare(). */
public void takeScreenShot(RESScreenShotListener listener) {
    synchronized (syncOp) {
        if (videoCore == null) {
            return;
        }
        videoCore.takeScreenShot(listener);
    }
}
/** Registers a listener for video size changes; ignored before prepare(). */
public void setVideoChangeListener(RESVideoChangeListener listener) {
    synchronized (syncOp) {
        if (videoCore == null) {
            return;
        }
        videoCore.setVideoChangeListener(listener);
    }
}
/**
 * @return the video core's current draw frame rate, or 0 before the core exists
 */
public float getDrawFrameRate() {
    synchronized (syncOp) {
        if (videoCore == null) {
            return 0;
        }
        return videoCore.getDrawFrameRate();
    }
}
/**
 * Resolves the encoded video size from the preview size (soft mode) or the
 * caller-requested target size (hard mode), swapping axes for portrait, and
 * computes the crop ratio the GPU pipeline uses to match aspect ratios.
 */
private void resoveResolution(RESCoreParameters resCoreParameters, Size targetVideoSize) {
    if (resCoreParameters.filterMode == RESCoreParameters.FILTER_MODE_SOFT) {
        // Soft mode encodes exactly what the camera previews.
        if (resCoreParameters.isPortrait) {
            resCoreParameters.videoHeight = resCoreParameters.previewVideoWidth;
            resCoreParameters.videoWidth = resCoreParameters.previewVideoHeight;
        } else {
            resCoreParameters.videoWidth = resCoreParameters.previewVideoWidth;
            resCoreParameters.videoHeight = resCoreParameters.previewVideoHeight;
        }
        return;
    }
    // Hard mode encodes at the requested target size; crop the preview to fit.
    float previewW;
    float previewH;
    if (resCoreParameters.isPortrait) {
        resCoreParameters.videoHeight = targetVideoSize.getWidth();
        resCoreParameters.videoWidth = targetVideoSize.getHeight();
        previewW = resCoreParameters.previewVideoHeight;
        previewH = resCoreParameters.previewVideoWidth;
    } else {
        resCoreParameters.videoWidth = targetVideoSize.getWidth();
        resCoreParameters.videoHeight = targetVideoSize.getHeight();
        previewW = resCoreParameters.previewVideoWidth;
        previewH = resCoreParameters.previewVideoHeight;
    }
    float videoW = resCoreParameters.videoWidth;
    float videoH = resCoreParameters.videoHeight;
    float previewRatio = previewH / previewW;
    float videoRatio = videoH / videoW;
    if (previewRatio == videoRatio) {
        // Aspect ratios already match: nothing to crop.
        resCoreParameters.cropRatio = 0.0f;
    } else if (previewRatio > videoRatio) {
        // Preview is relatively taller: positive ratio crops along one axis.
        resCoreParameters.cropRatio = (1.0f - videoRatio / previewRatio) / 2.0f;
    } else {
        // Preview is relatively wider: negative ratio crops along the other axis.
        resCoreParameters.cropRatio = -(1.0f - previewRatio / videoRatio) / 2.0f;
    }
}
/**
 * Attaches (or detaches, with null) the recorder's video encoder on the video core.
 * NOTE(review): unlike the neighbouring accessors this is called without the
 * syncOp lock and without a null check on videoCore — confirm callers only
 * invoke it after the client has been prepared.
 */
public void setVideoEncoder(final MediaVideoEncoder encoder) {
    videoCore.setVideoEncoder(encoder);
}
/**
 * Forwards mirror switches to the video core.
 *
 * @param isEnableMirror        master mirror switch — presumably gates the two flags below; confirm in the core
 * @param isEnablePreviewMirror mirror the on-screen preview
 * @param isEnableStreamMirror  mirror the pushed stream
 */
public void setMirror(boolean isEnableMirror,boolean isEnablePreviewMirror,boolean isEnableStreamMirror) {
    videoCore.setMirror(isEnableMirror,isEnablePreviewMirror,isEnableStreamMirror);
}
/**
 * Flags whether the video core should reset its EGL context
 * (exact timing/semantics are defined by the core implementation).
 */
public void setNeedResetEglContext(boolean bol){
    videoCore.setNeedResetEglContext(bol);
}
/**
 * Stores the hosting activity.
 * NOTE(review): holding an Activity in a long-lived field can leak it —
 * confirm this client never outlives the activity, or clear it on teardown.
 */
public void setActivity(Activity activity) {
    this.activity=activity;
}
// Hosting activity, set via setActivity(); may be null.
private Activity activity;
/**
 * Maps a tap position on the preview surface to a camera focus rectangle.
 * Camera focus coordinates live in the fixed driver range
 * (-1000, -1000, 1000, 1000), with (0, 0) at the frame center.
 *
 * @param x           tap x in surface coordinates
 * @param y           tap y in surface coordinates
 * @param width       surface view width
 * @param height      surface view height
 * @param coefficient scale factor applied to the base focus-area edge length
 */
private static Rect calculateTapArea(float x, float y, int width, int height, float coefficient) {
    // coefficient only enlarges/shrinks the 200-unit base focus area.
    int areaSize = (int) (200f * coefficient);
    // Map surface coords into the -1000..1000 camera space:
    // pos/extent -> 0..1, *2000 -> 0..2000, -1000 re-centers at 0.
    // NOTE(review): x divides by the surface HEIGHT and y by the WIDTH —
    // presumably compensating for the 90-degree camera rotation in portrait;
    // confirm against the callers before "fixing" this.
    int centerX = (int) (x / height * 2000 - 1000);
    int centerY = (int) (y / width * 2000 - 1000);
    int halfArea = areaSize / 2;
    int left = clamp(centerX - halfArea, -1000, 1000);
    int top = clamp(centerY - halfArea, -1000, 1000);
    int right = clamp(left + areaSize, -1000, 1000);
    int bottom = clamp(top + areaSize, -1000, 1000);
    return new Rect(left, top, right, bottom);
}
/**
 * Clamps {@code x} into the inclusive range [{@code min}, {@code max}]
 * (no greater than the maximum, no less than the minimum).
 */
private static int clamp(int x, int min, int max) {
    // Standard-library form of the original two-branch check.
    return Math.max(min, Math.min(max, x));
}
/**
 * Requests an auto-focus pass around the focus area produced by
 * {@link #calculateTapArea} (weight 800, valid range 1..1000).
 * Hardened: no-op when the camera is absent, and devices that reject
 * focus-area parameters no longer crash the stream; debug System.out
 * prints replaced with LogTools.
 */
public void setCameraArea(){
    if (camera == null) {
        return;
    }
    List<Camera.Area> focusAreas = new ArrayList<>();
    focusAreas.add(new Camera.Area(calculateTapArea(0,0,5,5,1.0f), 800));
    try {
        Camera.Parameters parameters = camera.getParameters();
        parameters.setFocusAreas(focusAreas);
        camera.setParameters(parameters);
    } catch (Exception e) {
        // Some drivers throw when focus areas are unsupported; skip rather than crash.
        LogTools.trace(e);
        return;
    }
    camera.autoFocus(new Camera.AutoFocusCallback() {
        @Override
        public void onAutoFocus(boolean success, Camera camera) {
            LogTools.d("setCameraArea,autoFocus done,success=" + success);
        }
    });
}
}

View File

@@ -0,0 +1,109 @@
package me.lake.librestreaming.core;
import android.media.MediaCodec;
import java.nio.ByteBuffer;
import me.lake.librestreaming.rtmp.RESFlvData;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.rtmp.RESRtmpSender;
import me.lake.librestreaming.tools.LogTools;
/**
 * Drains AAC output buffers from the audio MediaCodec, wraps each one in an
 * FLV audio tag and hands it to the {@link RESFlvDataCollecter}.
 * The AudioSpecificConfig (csd-0) is sent once when the output format is known.
 */
public class AudioSenderThread extends Thread {
    // Timeout for dequeueOutputBuffer, in MICROseconds (5000us = 5ms).
    // (The old "//1ms" comment was wrong.)
    private static final long WAIT_TIME = 5000;
    private MediaCodec.BufferInfo eInfo;
    // Wall-clock base (ms) of the first encoded buffer; dts values are relative to it.
    private long startTime = 0;
    private MediaCodec dstAudioEncoder;
    private RESFlvDataCollecter dataCollecter;
    // volatile: written by quit() on another thread, read by the drain loop.
    private volatile boolean shouldQuit = false;

    AudioSenderThread(String name, MediaCodec encoder, RESFlvDataCollecter flvDataCollecter) {
        super(name);
        eInfo = new MediaCodec.BufferInfo();
        startTime = 0;
        dstAudioEncoder = encoder;
        dataCollecter = flvDataCollecter;
    }

    /** Asks the drain loop to stop and interrupts any blocking wait. */
    void quit() {
        shouldQuit = true;
        this.interrupt();
    }

    @Override
    public void run() {
        while (!shouldQuit) {
            int eobIndex = dstAudioEncoder.dequeueOutputBuffer(eInfo, WAIT_TIME);
            switch (eobIndex) {
                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                    LogTools.d("AudioSenderThread,MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED");
                    break;
                case MediaCodec.INFO_TRY_AGAIN_LATER:
                    // No output ready within WAIT_TIME; poll again.
                    break;
                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                    LogTools.d("AudioSenderThread,MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:" +
                            dstAudioEncoder.getOutputFormat().toString());
                    // csd-0 holds the AudioSpecificConfig required by the FLV/RTMP muxer.
                    ByteBuffer csd0 = dstAudioEncoder.getOutputFormat().getByteBuffer("csd-0");
                    sendAudioSpecificConfig(0, csd0);
                    break;
                default:
                    LogTools.d("AudioSenderThread,MediaCode,eobIndex=" + eobIndex);
                    if (startTime == 0) {
                        startTime = eInfo.presentationTimeUs / 1000;
                    }
                    /**
                     * The AudioSpecificConfig is already sent on INFO_OUTPUT_FORMAT_CHANGED,
                     * so codec-config buffers are skipped here.
                     * BUGFIX: flags is a bit field — test the bit instead of comparing the
                     * whole value, so a buffer carrying additional flags is still skipped.
                     */
                    if ((eInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0 && eInfo.size != 0) {
                        ByteBuffer realData = dstAudioEncoder.getOutputBuffers()[eobIndex];
                        realData.position(eInfo.offset);
                        realData.limit(eInfo.offset + eInfo.size);
                        sendRealData((eInfo.presentationTimeUs / 1000) - startTime, realData);
                    }
                    dstAudioEncoder.releaseOutputBuffer(eobIndex, false);
                    break;
            }
        }
        eInfo = null;
    }

    /** Sends the AAC AudioSpecificConfig tag (non-droppable). */
    private void sendAudioSpecificConfig(long tms, ByteBuffer realData) {
        collectFlvAudioTag(tms, realData, true);
    }

    /** Sends a regular AAC frame tag (droppable under backpressure). */
    private void sendRealData(long tms, ByteBuffer realData) {
        collectFlvAudioTag(tms, realData, false);
    }

    /**
     * Wraps one encoder output buffer in an FLV audio tag and hands it to the
     * collector. Factored out of the two former near-identical senders.
     *
     * @param tms                   decode timestamp in ms, relative to startTime
     * @param realData              encoder payload (position..limit)
     * @param isAudioSpecificConfig true for the csd-0 sequence header
     */
    private void collectFlvAudioTag(long tms, ByteBuffer realData, boolean isAudioSpecificConfig) {
        int packetLen = Packager.FLVPackager.FLV_AUDIO_TAG_LENGTH +
                realData.remaining();
        byte[] finalBuff = new byte[packetLen];
        realData.get(finalBuff, Packager.FLVPackager.FLV_AUDIO_TAG_LENGTH,
                realData.remaining());
        Packager.FLVPackager.fillFlvAudioTag(finalBuff,
                0,
                isAudioSpecificConfig);
        RESFlvData resFlvData = new RESFlvData();
        // The sequence header must reach the muxer; normal frames may be dropped.
        resFlvData.droppable = !isAudioSpecificConfig;
        resFlvData.byteBuffer = finalBuff;
        resFlvData.size = finalBuff.length;
        resFlvData.dts = (int) tms;
        resFlvData.flvTagType = RESFlvData.FLV_RTMP_PACKET_TYPE_AUDIO;
        dataCollecter.collect(resFlvData, RESRtmpSender.FROM_AUDIO);
    }
}

View File

@@ -0,0 +1,131 @@
package me.lake.librestreaming.core;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.hardware.Camera;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.Size;
import me.lake.librestreaming.tools.LogTools;
/**
 * Static helpers that negotiate camera parameters (focus mode, preview size,
 * fps range, preview color format) against what the device actually supports.
 */
public class CameraHelper {
    public static int targetFps = 30000;
    // Preview formats the soft pipeline can consume, in order of preference.
    private static int[] supportedSrcVideoFrameColorType = new int[]{ImageFormat.NV21, ImageFormat.YV12};

    /**
     * Applies the resolved preview configuration to the camera.
     * Releases the camera and returns false when the driver rejects the parameters.
     */
    public static boolean configCamera(Camera camera, RESCoreParameters coreParameters) {
        camera.cancelAutoFocus();
        Camera.Parameters parameters = camera.getParameters();
        parameters.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO);
        List<String> focusModes = parameters.getSupportedFocusModes();
        // BUGFIX: only request FOCUS_MODE_AUTO when the device advertises it;
        // setting an unsupported mode makes setParameters() throw below.
        if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
        }
        parameters.setPreviewSize(coreParameters.previewVideoWidth, coreParameters.previewVideoHeight);
        parameters.setPreviewFpsRange(coreParameters.previewMinFps, coreParameters.previewMaxFps);
        try {
            camera.setParameters(parameters);
        } catch (Exception e) {
            LogTools.trace(e);
            camera.release();
            return false;
        }
        return true;
    }

    /**
     * Picks the overall minimum and maximum supported preview fps across all
     * advertised ranges.
     * BUGFIX: previewMaxFps is now seeded from the list as well — it previously
     * kept whatever stale/default value the field already held.
     */
    public static void selectCameraFpsRange(Camera.Parameters parameters, RESCoreParameters coreParameters) {
        List<int[]> fpsRanges = parameters.getSupportedPreviewFpsRange();
        if (fpsRanges == null || fpsRanges.isEmpty()) {
            return;
        }
        coreParameters.previewMinFps = fpsRanges.get(0)[0];
        coreParameters.previewMaxFps = fpsRanges.get(0)[1];
        for (int[] fpsRange : fpsRanges) {
            if (coreParameters.previewMinFps > fpsRange[0]) {
                coreParameters.previewMinFps = fpsRange[0];
            }
            if (coreParameters.previewMaxFps < fpsRange[1]) {
                coreParameters.previewMaxFps = fpsRange[1];
            }
        }
    }

    /**
     * Selects the smallest supported preview size that covers the target size.
     * Leaves coreParameters untouched when no supported size is large enough.
     */
    public static void selectCameraPreviewWH(Camera.Parameters parameters, RESCoreParameters coreParameters, Size targetSize) {
        List<Camera.Size> previewsSizes = parameters.getSupportedPreviewSizes();
        // Sort ascending by pixel count so the first match is the smallest adequate size.
        // BUGFIX: use Integer.compare — the old comparator never returned 0 for
        // equal areas, violating the Comparator contract (TimSort may reject it).
        Collections.sort(previewsSizes, new Comparator<Camera.Size>() {
            @Override
            public int compare(Camera.Size lhs, Camera.Size rhs) {
                return Integer.compare(lhs.width * lhs.height, rhs.width * rhs.height);
            }
        });
        for (Camera.Size size : previewsSizes) {
            if (size.width >= targetSize.getWidth() && size.height >= targetSize.getHeight()) {
                coreParameters.previewVideoWidth = size.width;
                coreParameters.previewVideoHeight = size.height;
                return;
            }
        }
    }

    /**
     * Chooses the preview color format, preferring NV21 over YV12.
     *
     * @return false when the camera supports neither format
     */
    public static boolean selectCameraColorFormat(Camera.Parameters parameters, RESCoreParameters coreParameters) {
        List<Integer> srcColorTypes = new LinkedList<>();
        List<Integer> supportedPreviewFormates = parameters.getSupportedPreviewFormats();
        for (int colortype : supportedSrcVideoFrameColorType) {
            if (supportedPreviewFormates.contains(colortype)) {
                srcColorTypes.add(colortype);
            }
        }
        // (Cleaned up: the old code assigned previewColorFormat as a side effect
        // inside the contains() calls.)
        if (srcColorTypes.contains(ImageFormat.NV21)) {
            coreParameters.previewColorFormat = ImageFormat.NV21;
        } else if (srcColorTypes.contains(ImageFormat.YV12)) {
            coreParameters.previewColorFormat = ImageFormat.YV12;
        } else {
            LogTools.e("!!!!!!!!!!!UnSupport,previewColorFormat");
            return false;
        }
        return true;
    }
}

View File

@@ -0,0 +1,19 @@
package me.lake.librestreaming.core;
@SuppressWarnings("all")
/**
 * JNI entry points for pixel-format conversion, implemented in the bundled
 * native library. Names describe the conversion; exact semantics live in the
 * native sources.
 * NOTE(review): YSize presumably is the luma-plane byte count (width*height),
 * and directionFlag a rotation/flip selector — confirm against the C code.
 */
public class ColorHelper {
    // NV21 -> YUV420 semi-planar (NV12-style chroma interleaving).
    static public native void NV21TOYUV420SP(byte[] src, byte[] dst, int YSize);
    // NV21 -> YUV420 planar (I420).
    static public native void NV21TOYUV420P(byte[] src, byte[] dst, int YSize);
    // YUV420 semi-planar -> YUV420 planar.
    static public native void YUV420SPTOYUV420P(byte[] src, byte[] dst, int YSize);
    // NV21 -> 32-bit ARGB.
    static public native void NV21TOARGB(byte[] src, int[] dst, int width,int height);
    // Fix-up pass for pixels read back from GL (channel order/orientation).
    static public native void FIXGLPIXEL(int[] src,int[] dst, int width,int height);
    //slow
    // Rotates/flips an NV21 frame according to directionFlag.
    static public native void NV21Transform(byte[] src, byte[] dst, int srcwidth,int srcheight,int directionFlag);
}

View File

@@ -0,0 +1,499 @@
package me.lake.librestreaming.core;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import android.view.Surface;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import javax.microedition.khronos.egl.EGL10;
import me.lake.librestreaming.model.MediaCodecGLWapper;
import me.lake.librestreaming.model.OffScreenGLWapper;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.ScreenGLWapper;
import me.lake.librestreaming.tools.GLESTools;
/**
 * EGL/GLES utilities shared by the render pipeline: EGL display/context/surface
 * bring-up for the off-screen, MediaCodec and on-screen render targets, shader
 * program creation, framebuffer creation, and vertex/texture-coordinate buffer
 * construction (including rotation, crop and mirror adjustments).
 *
 * Changes vs. the original: removed dead eglQueryContext calls whose results
 * were discarded, removed commented-out vertex arrays, and fixed the dead
 * always-null guard in adjustTextureFlip.
 */
public class GLHelper {
    // EGL_RECORDABLE_ANDROID: marks a config usable with MediaCodec input surfaces.
    private static final int EGL_RECORDABLE_ANDROID = 0x3142;
    private static String VERTEXSHADER = "" +
            "attribute vec4 aPosition;\n" +
            "attribute vec2 aTextureCoord;\n" +
            "varying vec2 vTextureCoord;\n" +
            "void main(){\n" +
            " gl_Position= aPosition;\n" +
            " vTextureCoord = aTextureCoord;\n" +
            "}";
    private static final String VERTEXSHADER_CAMERA2D =
            "attribute vec4 aPosition;\n" +
            "attribute vec4 aTextureCoord;\n" +
            "uniform mat4 uTextureMatrix;\n" +
            "varying vec2 vTextureCoord;\n" +
            "void main(){\n" +
            " gl_Position= aPosition;\n" +
            " vTextureCoord = (uTextureMatrix * aTextureCoord).xy;\n" +
            "}";
    private static String FRAGMENTSHADER_CAMERA = "" +
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision highp float;\n" +
            "varying highp vec2 vTextureCoord;\n" +
            "uniform sampler2D uTexture;\n" +
            "void main(){\n" +
            " vec4 color = texture2D(uTexture, vTextureCoord);\n" +
            " gl_FragColor = color;\n" +
            "}";
    private static String FRAGMENTSHADER_CAMERA2D = "" +
            "#extension GL_OES_EGL_image_external : require\n" +
            "precision highp float;\n" +
            "varying highp vec2 vTextureCoord;\n" +
            "uniform samplerExternalOES uTexture;\n" +
            "void main(){\n" +
            " vec4 color = texture2D(uTexture, vTextureCoord);\n" +
            " gl_FragColor = color;\n" +
            "}";
    private static String FRAGMENTSHADER_2D = "" +
            "precision highp float;\n" +
            "varying highp vec2 vTextureCoord;\n" +
            "uniform sampler2D uTexture;\n" +
            "void main(){\n" +
            " vec4 color = texture2D(uTexture, vTextureCoord);\n" +
            " gl_FragColor = color;\n" +
            "}";
    // Two triangles covering the full quad, indexing the 4 vertices below.
    private static short drawIndices[] = {0, 1, 2, 0, 2, 3};
    private static float SquareVertices[] = {
            -1.0f, 1.0f,
            -1.0f, -1.0f,
            1.0f, -1.0f,
            1.0f, 1.0f};
    private static float CamTextureVertices[] = {
            0.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 0.0f,
            1.0f, 1.0f};
    private static float Cam2dTextureVertices[] = {
            0.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 0.0f,
            1.0f, 1.0f};
    private static float Cam2dTextureVertices_90[] = {
            0.0f, 0.0f,
            1.0f, 0.0f,
            1.0f, 1.0f,
            0.0f, 1.0f};
    private static float Cam2dTextureVertices_180[] = {
            1.0f, 0.0f,
            1.0f, 1.0f,
            0.0f, 1.0f,
            0.0f, 0.0f};
    private static float Cam2dTextureVertices_270[] = {
            1.0f, 1.0f,
            0.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 0.0f};
    public static float MediaCodecTextureVertices[] = {
            0.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 0.0f,
            1.0f, 1.0f};
    private static float ScreenTextureVertices[] = {
            0.0f, 1.0f,
            0.0f, 0.0f,
            1.0f, 0.0f,
            1.0f, 1.0f};
    public static int FLOAT_SIZE_BYTES = 4;
    public static int SHORT_SIZE_BYTES = 2;
    public static int COORDS_PER_VERTEX = 2;
    public static int TEXTURE_COORDS_PER_VERTEX = 2;

    /**
     * Creates an off-screen (1x1 pbuffer) EGL context used as the root context
     * the other pipeline contexts share textures with.
     */
    public static void initOffScreenGL(OffScreenGLWapper wapper) {
        wapper.eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        if (EGL14.EGL_NO_DISPLAY == wapper.eglDisplay) {
            throw new RuntimeException("eglGetDisplay,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
        int versions[] = new int[2];
        if (!EGL14.eglInitialize(wapper.eglDisplay, versions, 0, versions, 1)) {
            throw new RuntimeException("eglInitialize,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
        int configsCount[] = new int[1];
        EGLConfig configs[] = new EGLConfig[1];
        int configSpec[] = new int[]{
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_DEPTH_SIZE, 0,
                EGL14.EGL_STENCIL_SIZE, 0,
                EGL14.EGL_NONE
        };
        EGL14.eglChooseConfig(wapper.eglDisplay, configSpec, 0, configs, 0, 1, configsCount, 0);
        if (configsCount[0] <= 0) {
            throw new RuntimeException("eglChooseConfig,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
        wapper.eglConfig = configs[0];
        // Minimal 1x1 pbuffer: this surface is only needed to make the context current.
        int[] surfaceAttribs = {
                EGL10.EGL_WIDTH, 1,
                EGL10.EGL_HEIGHT, 1,
                EGL14.EGL_NONE
        };
        int contextSpec[] = new int[]{
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL14.EGL_NONE
        };
        wapper.eglContext = EGL14.eglCreateContext(wapper.eglDisplay, wapper.eglConfig, EGL14.EGL_NO_CONTEXT, contextSpec, 0);
        if (EGL14.EGL_NO_CONTEXT == wapper.eglContext) {
            throw new RuntimeException("eglCreateContext,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
        wapper.eglSurface = EGL14.eglCreatePbufferSurface(wapper.eglDisplay, wapper.eglConfig, surfaceAttribs, 0);
        if (null == wapper.eglSurface || EGL14.EGL_NO_SURFACE == wapper.eglSurface) {
            throw new RuntimeException("eglCreateWindowSurface,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
    }

    /**
     * Creates an EGL context/window surface over the MediaCodec input surface,
     * sharing textures with {@code sharedContext}. Requires a recordable config.
     */
    public static void initMediaCodecGL(MediaCodecGLWapper wapper, EGLContext sharedContext, Surface mediaInputSurface) {
        wapper.eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        if (EGL14.EGL_NO_DISPLAY == wapper.eglDisplay) {
            throw new RuntimeException("eglGetDisplay,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
        int versions[] = new int[2];
        if (!EGL14.eglInitialize(wapper.eglDisplay, versions, 0, versions, 1)) {
            throw new RuntimeException("eglInitialize,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
        int configsCount[] = new int[1];
        EGLConfig configs[] = new EGLConfig[1];
        int configSpec[] = new int[]{
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL_RECORDABLE_ANDROID, 1,
                EGL14.EGL_DEPTH_SIZE, 0,
                EGL14.EGL_STENCIL_SIZE, 0,
                EGL14.EGL_NONE
        };
        EGL14.eglChooseConfig(wapper.eglDisplay, configSpec, 0, configs, 0, 1, configsCount, 0);
        if (configsCount[0] <= 0) {
            throw new RuntimeException("eglChooseConfig,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
        wapper.eglConfig = configs[0];
        int[] surfaceAttribs = {
                EGL14.EGL_NONE
        };
        int contextSpec[] = new int[]{
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL14.EGL_NONE
        };
        wapper.eglContext = EGL14.eglCreateContext(wapper.eglDisplay, wapper.eglConfig, sharedContext, contextSpec, 0);
        if (EGL14.EGL_NO_CONTEXT == wapper.eglContext) {
            throw new RuntimeException("eglCreateContext,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
        wapper.eglSurface = EGL14.eglCreateWindowSurface(wapper.eglDisplay, wapper.eglConfig, mediaInputSurface, surfaceAttribs, 0);
        if (null == wapper.eglSurface || EGL14.EGL_NO_SURFACE == wapper.eglSurface) {
            throw new RuntimeException("eglCreateWindowSurface,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
    }

    /**
     * Creates an EGL context/window surface over the on-screen SurfaceTexture,
     * sharing textures with {@code sharedContext}.
     */
    public static void initScreenGL(ScreenGLWapper wapper, EGLContext sharedContext, SurfaceTexture screenSurface) {
        wapper.eglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        if (EGL14.EGL_NO_DISPLAY == wapper.eglDisplay) {
            throw new RuntimeException("eglGetDisplay,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
        int versions[] = new int[2];
        if (!EGL14.eglInitialize(wapper.eglDisplay, versions, 0, versions, 1)) {
            throw new RuntimeException("eglInitialize,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
        int configsCount[] = new int[1];
        EGLConfig configs[] = new EGLConfig[1];
        int configSpec[] = new int[]{
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_DEPTH_SIZE, 0,
                EGL14.EGL_STENCIL_SIZE, 0,
                EGL14.EGL_NONE
        };
        EGL14.eglChooseConfig(wapper.eglDisplay, configSpec, 0, configs, 0, 1, configsCount, 0);
        if (configsCount[0] <= 0) {
            throw new RuntimeException("eglChooseConfig,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
        wapper.eglConfig = configs[0];
        int[] surfaceAttribs = {
                EGL14.EGL_NONE
        };
        int contextSpec[] = new int[]{
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL14.EGL_NONE
        };
        wapper.eglContext = EGL14.eglCreateContext(wapper.eglDisplay, wapper.eglConfig, sharedContext, contextSpec, 0);
        if (EGL14.EGL_NO_CONTEXT == wapper.eglContext) {
            throw new RuntimeException("eglCreateContext,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
        wapper.eglSurface = EGL14.eglCreateWindowSurface(wapper.eglDisplay, wapper.eglConfig, screenSurface, surfaceAttribs, 0);
        if (null == wapper.eglSurface || EGL14.EGL_NO_SURFACE == wapper.eglSurface) {
            throw new RuntimeException("eglCreateWindowSurface,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
    }

    /** Binds the off-screen context/surface on the calling thread. */
    public static void makeCurrent(OffScreenGLWapper wapper) {
        if (!EGL14.eglMakeCurrent(wapper.eglDisplay, wapper.eglSurface, wapper.eglSurface, wapper.eglContext)) {
            throw new RuntimeException("eglMakeCurrent,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
    }

    /** Binds the MediaCodec context/surface on the calling thread. */
    public static void makeCurrent(MediaCodecGLWapper wapper) {
        if (!EGL14.eglMakeCurrent(wapper.eglDisplay, wapper.eglSurface, wapper.eglSurface, wapper.eglContext)) {
            throw new RuntimeException("eglMakeCurrent,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
    }

    /** Binds the on-screen context/surface on the calling thread. */
    public static void makeCurrent(ScreenGLWapper wapper) {
        if (!EGL14.eglMakeCurrent(wapper.eglDisplay, wapper.eglSurface, wapper.eglSurface, wapper.eglContext)) {
            throw new RuntimeException("eglMakeCurrent,failed:" + GLUtils.getEGLErrorString(EGL14.eglGetError()));
        }
    }

    /**
     * Creates an RGBA framebuffer plus its backing color texture
     * (linear filtering, clamp-to-edge) for camera-frame rendering.
     */
    public static void createCamFrameBuff(int[] frameBuffer, int[] frameBufferTex, int width, int height) {
        GLES20.glGenFramebuffers(1, frameBuffer, 0);
        GLES20.glGenTextures(1, frameBufferTex, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameBufferTex[0]);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer[0]);
        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, frameBufferTex[0], 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
        GLESTools.checkGlError("createCamFrameBuff");
    }

    /** Enables and points the position/texture-coordinate vertex attributes. */
    public static void enableVertex(int posLoc, int texLoc, FloatBuffer shapeBuffer, FloatBuffer texBuffer) {
        GLES20.glEnableVertexAttribArray(posLoc);
        GLES20.glEnableVertexAttribArray(texLoc);
        GLES20.glVertexAttribPointer(posLoc, COORDS_PER_VERTEX,
                GLES20.GL_FLOAT, false,
                COORDS_PER_VERTEX * 4, shapeBuffer);
        GLES20.glVertexAttribPointer(texLoc, TEXTURE_COORDS_PER_VERTEX,
                GLES20.GL_FLOAT, false,
                TEXTURE_COORDS_PER_VERTEX * 4, texBuffer);
    }

    /** Disables the vertex attributes enabled by {@link #enableVertex}. */
    public static void disableVertex(int posLoc, int texLoc) {
        GLES20.glDisableVertexAttribArray(posLoc);
        GLES20.glDisableVertexAttribArray(texLoc);
    }

    public static int createCamera2DProgram() {
        return GLESTools.createProgram(VERTEXSHADER_CAMERA2D, FRAGMENTSHADER_CAMERA2D);
    }

    public static int createCameraProgram() {
        return GLESTools.createProgram(VERTEXSHADER, FRAGMENTSHADER_CAMERA);
    }

    public static int createMediaCodecProgram() {
        return GLESTools.createProgram(VERTEXSHADER, FRAGMENTSHADER_2D);
    }

    public static int createScreenProgram() {
        return GLESTools.createProgram(VERTEXSHADER, FRAGMENTSHADER_2D);
    }

    /** @return a direct buffer holding the quad's triangle indices */
    public static ShortBuffer getDrawIndecesBuffer() {
        ShortBuffer result = ByteBuffer.allocateDirect(SHORT_SIZE_BYTES * drawIndices.length).
                order(ByteOrder.nativeOrder()).
                asShortBuffer();
        result.put(drawIndices);
        result.position(0);
        return result;
    }

    /** @return a direct buffer holding the full-screen quad positions */
    public static FloatBuffer getShapeVerticesBuffer() {
        FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * SquareVertices.length).
                order(ByteOrder.nativeOrder()).
                asFloatBuffer();
        result.put(SquareVertices);
        result.position(0);
        return result;
    }

    public static FloatBuffer getMediaCodecTextureVerticesBuffer() {
        FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * MediaCodecTextureVertices.length).
                order(ByteOrder.nativeOrder()).
                asFloatBuffer();
        result.put(MediaCodecTextureVertices);
        result.position(0);
        return result;
    }

    public static FloatBuffer getScreenTextureVerticesBuffer() {
        FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * ScreenTextureVertices.length).
                order(ByteOrder.nativeOrder()).
                asFloatBuffer();
        result.put(ScreenTextureVertices);
        result.position(0);
        return result;
    }

    /**
     * Builds camera texture coordinates adjusted for rotation (high nibble of
     * {@code directionFlag}), optional horizontal/vertical flip bits, and the
     * aspect-ratio crop computed by the client (positive crops one axis,
     * negative the other). A directionFlag of -1 returns the identity mapping.
     */
    public static FloatBuffer getCamera2DTextureVerticesBuffer(final int directionFlag, final float cropRatio) {
        if (directionFlag == -1) {
            FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * Cam2dTextureVertices.length).
                    order(ByteOrder.nativeOrder()).
                    asFloatBuffer();
            result.put(CamTextureVertices);
            result.position(0);
            return result;
        }
        float[] buffer;
        switch (directionFlag & 0xF0) {
            case RESCoreParameters.FLAG_DIRECTION_ROATATION_90:
                buffer = Cam2dTextureVertices_90.clone();
                break;
            case RESCoreParameters.FLAG_DIRECTION_ROATATION_180:
                buffer = Cam2dTextureVertices_180.clone();
                break;
            case RESCoreParameters.FLAG_DIRECTION_ROATATION_270:
                buffer = Cam2dTextureVertices_270.clone();
                break;
            default:
                buffer = Cam2dTextureVertices.clone();
        }
        if ((directionFlag & 0xF0) == RESCoreParameters.FLAG_DIRECTION_ROATATION_0 || (directionFlag & 0xF0) == RESCoreParameters.FLAG_DIRECTION_ROATATION_180) {
            if (cropRatio > 0) {
                buffer[1] = buffer[1] == 1.0f ? (1.0f - cropRatio) : cropRatio;
                buffer[3] = buffer[3] == 1.0f ? (1.0f - cropRatio) : cropRatio;
                buffer[5] = buffer[5] == 1.0f ? (1.0f - cropRatio) : cropRatio;
                buffer[7] = buffer[7] == 1.0f ? (1.0f - cropRatio) : cropRatio;
            } else {
                buffer[0] = buffer[0] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
                buffer[2] = buffer[2] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
                buffer[4] = buffer[4] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
                buffer[6] = buffer[6] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
            }
        } else {
            if (cropRatio > 0) {
                buffer[0] = buffer[0] == 1.0f ? (1.0f - cropRatio) : cropRatio;
                buffer[2] = buffer[2] == 1.0f ? (1.0f - cropRatio) : cropRatio;
                buffer[4] = buffer[4] == 1.0f ? (1.0f - cropRatio) : cropRatio;
                buffer[6] = buffer[6] == 1.0f ? (1.0f - cropRatio) : cropRatio;
            } else {
                buffer[1] = buffer[1] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
                buffer[3] = buffer[3] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
                buffer[5] = buffer[5] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
                buffer[7] = buffer[7] == 1.0f ? (1.0f + cropRatio) : -cropRatio;
            }
        }
        if ((directionFlag & RESCoreParameters.FLAG_DIRECTION_FLIP_HORIZONTAL) != 0) {
            buffer[0] = flip(buffer[0]);
            buffer[2] = flip(buffer[2]);
            buffer[4] = flip(buffer[4]);
            buffer[6] = flip(buffer[6]);
        }
        if ((directionFlag & RESCoreParameters.FLAG_DIRECTION_FLIP_VERTICAL) != 0) {
            buffer[1] = flip(buffer[1]);
            buffer[3] = flip(buffer[3]);
            buffer[5] = flip(buffer[5]);
            buffer[7] = flip(buffer[7]);
        }
        FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * buffer.length).
                order(ByteOrder.nativeOrder()).
                asFloatBuffer();
        result.put(buffer);
        result.position(0);
        return result;
    }

    public static FloatBuffer getCameraTextureVerticesBuffer() {
        FloatBuffer result = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * Cam2dTextureVertices.length).
                order(ByteOrder.nativeOrder()).
                asFloatBuffer();
        result.put(CamTextureVertices);
        result.position(0);
        return result;
    }

    // Mirrors a texture coordinate around the 0..1 midpoint.
    private static float flip(final float i) {
        return (1.0f - i);
    }

    /**
     * Returns a fresh texture-coordinate buffer for the camera texture,
     * optionally mirrored horizontally (vertical flip is never applied here).
     * BUGFIX: removed the dead "if (mTextureBuffer == null)" guard on a
     * variable that was always null.
     */
    public static FloatBuffer adjustTextureFlip(boolean flipHorizontal) {
        float[] textureCords = getFlip(flipHorizontal, false);
        FloatBuffer mTextureBuffer = ByteBuffer.allocateDirect(textureCords.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer();
        mTextureBuffer.clear();
        mTextureBuffer.put(textureCords).position(0);
        return mTextureBuffer;
    }

    /**
     * Returns the identity camera texture coordinates, flipped horizontally
     * and/or vertically as requested. The shared source array is never mutated.
     */
    public static float[] getFlip(final boolean flipHorizontal,
                                  final boolean flipVertical) {
        float[] rotatedTex = Cam2dTextureVertices;
        if (flipHorizontal) {
            rotatedTex = new float[]{
                    flip2(rotatedTex[0]), rotatedTex[1],
                    flip2(rotatedTex[2]), rotatedTex[3],
                    flip2(rotatedTex[4]), rotatedTex[5],
                    flip2(rotatedTex[6]), rotatedTex[7],
            };
        }
        if (flipVertical) {
            rotatedTex = new float[]{
                    rotatedTex[0], flip2(rotatedTex[1]),
                    rotatedTex[2], flip2(rotatedTex[3]),
                    rotatedTex[4], flip2(rotatedTex[5]),
                    rotatedTex[6], flip2(rotatedTex[7]),
            };
        }
        return rotatedTex;
    }

    // Swaps the coordinate endpoints: 0 -> 1, anything else -> 0.
    private static float flip2(final float i) {
        if (i == 0.0f) {
            return 1.0f;
        }
        return 0.0f;
    }
}

View File

@@ -0,0 +1,137 @@
package me.lake.librestreaming.core;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import java.io.IOException;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.tools.LogTools;
public class MediaCodecHelper {
/**
 * Creates and pre-configures an AVC encoder format/codec pair for the
 * soft-filter pipeline, selecting a YUV420 input color format the codec
 * supports (semi-planar preferred, planar fallback).
 * BUGFIX: the codec is now released on the unsupported-color-format path
 * instead of being leaked.
 *
 * @return the un-started encoder, or null when creation or format selection fails
 */
public static MediaCodec createSoftVideoMediaCodec(RESCoreParameters coreParameters, MediaFormat videoFormat) {
    videoFormat.setString(MediaFormat.KEY_MIME, "video/avc");
    videoFormat.setInteger(MediaFormat.KEY_WIDTH, coreParameters.videoWidth);
    videoFormat.setInteger(MediaFormat.KEY_HEIGHT, coreParameters.videoHeight);
    videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, coreParameters.mediacdoecAVCBitRate);
    videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, coreParameters.mediacodecAVCFrameRate);
    videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, coreParameters.mediacodecAVCIFrameInterval);
    videoFormat.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline);
    videoFormat.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel31);
    videoFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);
    MediaCodec result = null;
    try {
        result = MediaCodec.createEncoderByType(videoFormat.getString(MediaFormat.KEY_MIME));
        int[] colorful = result.getCodecInfo().getCapabilitiesForType(videoFormat.getString(MediaFormat.KEY_MIME)).colorFormats;
        int dstVideoColorFormat = -1;
        // Prefer YUV420 semi-planar; fall back to planar.
        if (isArrayContain(colorful, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar)) {
            dstVideoColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
            coreParameters.mediacodecAVCColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
        }
        if (dstVideoColorFormat == -1 && isArrayContain(colorful, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar)) {
            dstVideoColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
            coreParameters.mediacodecAVCColorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
        }
        if (dstVideoColorFormat == -1) {
            LogTools.e("!!!!!!!!!!!UnSupport,mediaCodecColorFormat");
            // Release the codec before bailing out so it is not leaked.
            result.release();
            return null;
        }
        videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, dstVideoColorFormat);
    } catch (IOException e) {
        LogTools.trace(e);
        return null;
    }
    return result;
}
/**
 * Configure {@code audioFormat} for an AAC (audio/mp4a-latm) encoder from the
 * core parameters and create the matching encoder.
 *
 * @return the encoder, or {@code null} if no encoder for the MIME type could be created
 */
public static MediaCodec createAudioMediaCodec(RESCoreParameters coreParameters, MediaFormat audioFormat) {
    audioFormat.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
    audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, coreParameters.mediacodecAACProfile);
    audioFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, coreParameters.mediacodecAACSampleRate);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, coreParameters.mediacodecAACChannelCount);
    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, coreParameters.mediacodecAACBitRate);
    audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, coreParameters.mediacodecAACMaxInputSize);
    LogTools.d("creatingAudioEncoder,format=" + audioFormat.toString());
    try {
        // May fail if the device exposes no encoder for this MIME type.
        return MediaCodec.createEncoderByType(audioFormat.getString(MediaFormat.KEY_MIME));
    } catch (Exception e) {
        LogTools.trace("can`t create audioEncoder!", e);
        return null;
    }
}
/**
 * Configure {@code videoFormat} for a Surface-input AVC (H.264) encoder
 * (baseline profile, level 3.1, CBR) and create the matching encoder.
 *
 * @return the encoder, or {@code null} on failure
 */
public static MediaCodec createHardVideoMediaCodec(RESCoreParameters coreParameters, MediaFormat videoFormat) {
    videoFormat.setString(MediaFormat.KEY_MIME, "video/avc");
    videoFormat.setInteger(MediaFormat.KEY_WIDTH, coreParameters.videoWidth);
    videoFormat.setInteger(MediaFormat.KEY_HEIGHT, coreParameters.videoHeight);
    // Surface color format: frames come in through an input Surface (OpenGL), not YUV buffers.
    videoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    videoFormat.setInteger(MediaFormat.KEY_BIT_RATE, coreParameters.mediacdoecAVCBitRate);
    videoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, coreParameters.mediacodecAVCFrameRate);
    videoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, coreParameters.mediacodecAVCIFrameInterval);
    videoFormat.setInteger(MediaFormat.KEY_PROFILE, MediaCodecInfo.CodecProfileLevel.AVCProfileBaseline);
    videoFormat.setInteger(MediaFormat.KEY_LEVEL, MediaCodecInfo.CodecProfileLevel.AVCLevel31);
    videoFormat.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);
    // BUGFIX: the original additionally set KEY_COMPLEXITY to BITRATE_MODE_CBR.
    // KEY_COMPLEXITY expects a codec-specific complexity value (see
    // EncoderCapabilities.getComplexityRange()), not a bitrate-mode enum, so that
    // setting was meaningless at best and rejected by some encoders at worst.
    MediaCodec result = null;
    try {
        result = MediaCodec.createEncoderByType(videoFormat.getString(MediaFormat.KEY_MIME));
    } catch (IOException e) {
        LogTools.trace(e);
        return null;
    }
    return result;
}
/** Linear scan: {@code true} iff {@code target} occurs anywhere in {@code src}. */
private static boolean isArrayContain(int[] src, int target) {
    for (int i = 0; i < src.length; i++) {
        if (src[i] == target) {
            return true;
        }
    }
    return false;
}
/** {@code true} iff any entry in {@code src} advertises profile {@code target}. */
private static boolean isProfileContain(MediaCodecInfo.CodecProfileLevel[] src, int target) {
    for (MediaCodecInfo.CodecProfileLevel profileLevel : src) {
        if (profileLevel.profile == target) {
            return true;
        }
    }
    return false;
}
}

View File

@@ -0,0 +1,88 @@
package me.lake.librestreaming.core;
import android.media.MediaFormat;
import java.nio.ByteBuffer;
import me.lake.librestreaming.tools.ByteArrayTools;
/**
 * Byte-level packagers for the RTMP/FLV pipeline: builds the H.264
 * AVCDecoderConfigurationRecord and fills FLV tag headers.
 */
public class Packager {
    public static class H264Packager {
        /**
         * Build an AVCDecoderConfigurationRecord (ISO/IEC 14496-15) from the
         * encoder's "csd-0" (SPS) and "csd-1" (PPS) buffers.
         * Assumes each buffer starts with a 4-byte Annex-B start code
         * (00 00 00 01), which is skipped via position(4).
         */
        public static byte[] generateAVCDecoderConfigurationRecord(MediaFormat mediaFormat) {
            ByteBuffer SPSByteBuff = mediaFormat.getByteBuffer("csd-0");
            SPSByteBuff.position(4);
            ByteBuffer PPSByteBuff = mediaFormat.getByteBuffer("csd-1");
            PPSByteBuff.position(4);
            int spslength = SPSByteBuff.remaining();
            int ppslength = PPSByteBuff.remaining();
            // 5 header bytes + 1 (numSPS) + 2 (SPS length) + SPS
            // + 1 (numPPS) + 2 (PPS length) + PPS = 11 + sps + pps
            int length = 11 + spslength + ppslength;
            byte[] result = new byte[length];
            // SPS goes at offset 8; PPS follows its own 3-byte (count+length) header.
            SPSByteBuff.get(result, 8, spslength);
            PPSByteBuff.get(result, 8 + spslength + 3, ppslength);
            /**
             * UB[8]configurationVersion
             * UB[8]AVCProfileIndication
             * UB[8]profile_compatibility
             * UB[8]AVCLevelIndication
             * UB[8]lengthSizeMinusOne
             */
            result[0] = 0x01;
            // Profile/compatibility/level bytes are read straight out of the SPS
            // payload that was copied at offset 8 (skipping the SPS NAL header byte).
            result[1] = result[9];
            result[2] = result[10];
            result[3] = result[11];
            result[4] = (byte) 0xFF; // reserved '111111' + lengthSizeMinusOne=3 (4-byte NALU lengths)
            /**
             * UB[8]numOfSequenceParameterSets
             * UB[16]sequenceParameterSetLength
             */
            result[5] = (byte) 0xE1; // reserved '111' + 1 SPS
            ByteArrayTools.intToByteArrayTwoByte(result, 6, spslength);
            /**
             * UB[8]numOfPictureParameterSets
             * UB[16]pictureParameterSetLength
             */
            int pos = 8 + spslength;
            result[pos] = (byte) 0x01; // 1 PPS
            ByteArrayTools.intToByteArrayTwoByte(result, pos + 1, ppslength);
            return result;
        }
    }
    public static class FLVPackager {
        public static final int FLV_TAG_LENGTH = 11;       // FLV tag header size
        public static final int FLV_VIDEO_TAG_LENGTH = 5;  // VideoTagHeader size
        public static final int FLV_AUDIO_TAG_LENGTH = 2;  // AudioTagHeader size
        public static final int FLV_TAG_FOOTER_LENGTH = 4; // PreviousTagSize field
        public static final int NALU_HEADER_LENGTH = 4;    // 4-byte NALU length prefix
        /**
         * Write the 5-byte FLV VideoTagHeader into {@code dst} at {@code pos};
         * for NALU payloads (not sequence headers) also writes the 4-byte NALU
         * length right after it.
         */
        public static void fillFlvVideoTag(byte[] dst, int pos, boolean isAVCSequenceHeader, boolean isIDR, int readDataLength) {
            //FrameType&CodecID: 0x17 = keyframe+AVC, 0x27 = interframe+AVC
            dst[pos] = isIDR ? (byte) 0x17 : (byte) 0x27;
            //AVCPacketType: 0 = sequence header, 1 = NALU
            dst[pos + 1] = isAVCSequenceHeader ? (byte) 0x00 : (byte) 0x01;
            //LAKETODO CompositionTime
            dst[pos + 2] = 0x00;
            dst[pos + 3] = 0x00;
            dst[pos + 4] = 0x00;
            if (!isAVCSequenceHeader) {
                //NALU HEADER
                ByteArrayTools.intToByteArrayFull(dst, pos + 5, readDataLength);
            }
        }
        /**
         * Write the 2-byte FLV AudioTagHeader for AAC into {@code dst} at {@code pos}.
         */
        public static void fillFlvAudioTag(byte[] dst, int pos, boolean isAACSequenceHeader) {
            /**
             * UB[4] 10=AAC
             * UB[2] 3=44kHz
             * UB[1] 1=16-bit
             * UB[1] 0=MonoSound
             */
            dst[pos] = (byte) 0xAE;
            // AACPacketType: 0 = sequence header (AudioSpecificConfig), 1 = raw AAC
            dst[pos + 1] = isAACSequenceHeader ? (byte) 0x00 : (byte) 0x01;
        }
    }
}

View File

@@ -0,0 +1,57 @@
package me.lake.librestreaming.core;
import java.util.LinkedList;
/**
 * Sliding-window byte-rate meter: {@link #gain(int)} records how many bytes
 * just passed, {@link #getSpeed()} reports bytes/second averaged over the last
 * {@code timeGranularity} milliseconds. Thread-safe.
 */
public class RESByteSpeedometer {
    private final int timeGranularity;
    private final LinkedList<ByteFrame> byteList;
    private final Object syncByteList = new Object();

    /**
     * @param timeGranularity window length in milliseconds over which the speed is averaged
     */
    public RESByteSpeedometer(int timeGranularity) {
        this.timeGranularity = timeGranularity;
        byteList = new LinkedList<>();
    }

    /**
     * @return average speed in bytes per second over the configured window
     */
    public int getSpeed() {
        synchronized (syncByteList) {
            long now = System.currentTimeMillis();
            trim(now);
            long sumByte = 0;
            for (ByteFrame byteFrame : byteList) {
                sumByte += byteFrame.bytenum;
            }
            // bytes-in-window * 1000 ms / window-ms = bytes per second
            return (int) (sumByte * 1000 / timeGranularity);
        }
    }

    /** Record that {@code byteCount} bytes passed just now. */
    public void gain(int byteCount) {
        synchronized (syncByteList) {
            long now = System.currentTimeMillis();
            byteList.addLast(new ByteFrame(now, byteCount));
            trim(now);
        }
    }

    /** Drop samples older than the window. Caller must hold syncByteList. */
    private void trim(long time) {
        while (!byteList.isEmpty() && (time - byteList.getFirst().time) > timeGranularity) {
            byteList.removeFirst();
        }
    }

    /** Forget all recorded samples. */
    public void reset() {
        synchronized (syncByteList) {
            byteList.clear();
        }
    }

    /**
     * Timestamped byte count. BUGFIX (idiom): made static — the original
     * non-static inner class held a useless reference to the enclosing
     * speedometer for every recorded sample.
     */
    private static class ByteFrame {
        final long time;
        final long bytenum;

        ByteFrame(long time, long bytenum) {
            this.time = time;
            this.bytenum = bytenum;
        }
    }
}

View File

@@ -0,0 +1,44 @@
package me.lake.librestreaming.core;
/**
 * Measures a frame rate over a one-second window. {@link #count()} is called
 * once per frame; {@link #getFps()} returns the rate computed for the last
 * completed window, or 0 once the meter has gone stale (no window completed
 * within the last two seconds).
 */
public class RESFrameRateMeter {
    private static final long TIMETRAVEL = 1;
    private static final long TIMETRAVEL_MS = TIMETRAVEL * 1000;
    private static final long GET_TIMETRAVEL_MS = 2 * TIMETRAVEL_MS;

    private int times;           // frames counted in the current window
    private float lastFps;       // fps computed for the previous window
    private long lastUpdateTime; // window start in ms; 0 means "never counted"

    public RESFrameRateMeter() {
        times = 0;
        lastFps = 0;
        lastUpdateTime = 0;
    }

    /** Register one frame; recomputes fps each time a full window has elapsed. */
    public void count() {
        final long now = System.currentTimeMillis();
        if (lastUpdateTime == 0) {
            lastUpdateTime = now;
        }
        final long elapsed = now - lastUpdateTime;
        if (elapsed > TIMETRAVEL_MS) {
            lastFps = (((float) times) / elapsed) * 1000.0f;
            lastUpdateTime = now;
            times = 0;
        }
        ++times;
    }

    /** @return the last measured fps, or 0 when the meter is stale. */
    public float getFps() {
        final boolean stale = (System.currentTimeMillis() - lastUpdateTime) > GET_TIMETRAVEL_MS;
        return stale ? 0 : lastFps;
    }

    /** Clear all state, as if freshly constructed. */
    public void reSet() {
        times = 0;
        lastFps = 0;
        lastUpdateTime = 0;
    }
}

View File

@@ -0,0 +1,891 @@
package me.lake.librestreaming.core;
import android.annotation.TargetApi;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.opengl.EGL14;
import android.opengl.EGLExt;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.SystemClock;
import android.view.Surface;
import java.nio.Buffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.nio.ShortBuffer;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import me.lake.librestreaming.client.CallbackDelivery;
import me.lake.librestreaming.core.listener.RESScreenShotListener;
import me.lake.librestreaming.core.listener.RESVideoChangeListener;
import me.lake.librestreaming.encoder.MediaVideoEncoder;
import me.lake.librestreaming.filter.hardvideofilter.BaseHardVideoFilter;
import me.lake.librestreaming.model.MediaCodecGLWapper;
import me.lake.librestreaming.model.OffScreenGLWapper;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.ScreenGLWapper;
import me.lake.librestreaming.model.Size;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.tools.LogTools;
public class RESHardVideoCore implements RESVideoCore {
// Core parameters shared with the client layer (sizes, fps, bitrate, ...).
RESCoreParameters resCoreParameters;
private final Object syncOp = new Object(); // guards all public entry points
//filter
private Lock lockVideoFilter = null; // serializes filter swaps against GL-thread use
private BaseHardVideoFilter videoFilter; // filter requested by the client
private MediaCodec dstVideoEncoder; // AVC encoder, (re)created on the GL thread
private MediaFormat dstVideoFormat;
private final Object syncPreview = new Object();
private HandlerThread videoGLHandlerThread; // dedicated GL thread
private VideoGLHandler videoGLHander; // handler running on videoGLHandlerThread
final private Object syncResScreenShotListener = new Object();
private RESScreenShotListener resScreenShotListener; // one-shot screenshot callback
final private Object syncResVideoChangeListener = new Object();
private RESVideoChangeListener resVideoChangeListener;
private final Object syncIsLooping = new Object(); // guards the two flags below
private boolean isPreviewing = false;
private boolean isStreaming = false;
private int loopingInterval; // ms between WHAT_DRAW ticks (1000 / fps)
private boolean isEnableMirror; // master mirror switch
private boolean isEnablePreviewMirror;
private boolean isEnableStreamMirror;
/**
 * Hardware (Surface-input MediaCodec) video core.
 *
 * @param parameters shared core parameters; retained, not copied
 */
public RESHardVideoCore(RESCoreParameters parameters) {
    resCoreParameters = parameters;
    lockVideoFilter = new ReentrantLock(false);
}
/**
 * Called when the camera has produced a frame; bumps the pending-frame count
 * on the GL thread.
 */
public void onFrameAvailable() {
    // BUGFIX: the original tested videoGLHandlerThread for null but then
    // dereferenced videoGLHander — after destroy() both are nulled, and the
    // mismatched check left an NPE window. Guard the field actually used.
    if (videoGLHander != null) {
        videoGLHander.addFrameNum();
    }
}
@Override
public boolean prepare(RESConfig resConfig) {
    // Copy the video settings out of the user config, then spin up the
    // dedicated GL thread; all GL work happens on videoGLHander from here on.
    synchronized (syncOp) {
        resCoreParameters.renderingMode = resConfig.getRenderingMode();
        resCoreParameters.mediacdoecAVCBitRate = resConfig.getBitRate();
        resCoreParameters.videoBufferQueueNum = resConfig.getVideoBufferQueueNum();
        resCoreParameters.mediacodecAVCIFrameInterval = resConfig.getVideoGOP();
        resCoreParameters.mediacodecAVCFrameRate = resCoreParameters.videoFPS;
        // Interval between WHAT_DRAW ticks in ms (integer division).
        loopingInterval = 1000 / resCoreParameters.videoFPS;
        dstVideoFormat = new MediaFormat();
        videoGLHandlerThread = new HandlerThread("GLThread");
        videoGLHandlerThread.start();
        videoGLHander = new VideoGLHandler(videoGLHandlerThread.getLooper());
        videoGLHander.sendEmptyMessage(VideoGLHandler.WHAT_INIT);
        return true;
    }
}
@Override
public void startPreview(SurfaceTexture surfaceTexture, int visualWidth, int visualHeight) {
    synchronized (syncOp) {
        // Hand the preview surface to the GL thread.
        videoGLHander.sendMessage(videoGLHander.obtainMessage(VideoGLHandler.WHAT_START_PREVIEW,
                visualWidth, visualHeight, surfaceTexture));
        synchronized (syncIsLooping) {
            // Kick off the periodic WHAT_DRAW loop only if it was idle
            // (neither preview nor streaming active).
            if (!isPreviewing && !isStreaming) {
                videoGLHander.removeMessages(VideoGLHandler.WHAT_DRAW);
                videoGLHander.sendMessageDelayed(videoGLHander.obtainMessage(VideoGLHandler.WHAT_DRAW, SystemClock.uptimeMillis() + loopingInterval), loopingInterval);
            }
            isPreviewing = true;
        }
    }
}
@Override
public void updatePreview(int visualWidth, int visualHeight) {
    // Forward the new preview surface size to the GL handler.
    synchronized (syncOp) {
        synchronized (syncPreview) {
            videoGLHander.updatePreview(visualWidth, visualHeight);
        }
    }
}
@Override
public void stopPreview(boolean releaseTexture) {
    synchronized (syncOp) {
        // releaseTexture == true additionally releases the preview
        // SurfaceTexture on the GL thread.
        videoGLHander.sendMessage(videoGLHander.obtainMessage(VideoGLHandler.WHAT_STOP_PREVIEW, releaseTexture));
        synchronized (syncIsLooping) {
            isPreviewing = false;
        }
    }
}
@Override
public boolean startStreaming(RESFlvDataCollecter flvDataCollecter) {
    synchronized (syncOp) {
        // Encoder and sender are created asynchronously on the GL thread.
        videoGLHander.sendMessage(videoGLHander.obtainMessage(VideoGLHandler.WHAT_START_STREAMING, flvDataCollecter));
        synchronized (syncIsLooping) {
            // Start the WHAT_DRAW loop if it was idle (no preview, no stream).
            if (!isPreviewing && !isStreaming) {
                videoGLHander.removeMessages(VideoGLHandler.WHAT_DRAW);
                videoGLHander.sendMessageDelayed(videoGLHander.obtainMessage(VideoGLHandler.WHAT_DRAW, SystemClock.uptimeMillis() + loopingInterval), loopingInterval);
            }
            isStreaming = true;
        }
    }
    return true;
}
/** Push the (possibly re-created) camera SurfaceTexture down to the GL handler. */
@Override
public void updateCamTexture(SurfaceTexture camTex) {
    synchronized (syncOp) {
        if (videoGLHander == null) {
            return;
        }
        videoGLHander.updateCamTexture(camTex);
    }
}
@Override
public boolean stopStreaming() {
    synchronized (syncOp) {
        // Encoder/sender teardown happens asynchronously on the GL thread.
        videoGLHander.sendEmptyMessage(VideoGLHandler.WHAT_STOP_STREAMING);
        synchronized (syncIsLooping) {
            isStreaming = false;
        }
    }
    return true;
}
/**
 * Tear down the GL thread and all its resources; blocks until the thread exits.
 */
@Override
public boolean destroy() {
    synchronized (syncOp) {
        videoGLHander.sendEmptyMessage(VideoGLHandler.WHAT_UNINIT);
        // quitSafely() lets the already-queued WHAT_UNINIT run before the looper exits.
        videoGLHandlerThread.quitSafely();
        try {
            videoGLHandlerThread.join();
        } catch (InterruptedException ignored) {
            // BUGFIX: restore the interrupt flag instead of silently swallowing
            // it, so callers higher up can still observe the interruption.
            Thread.currentThread().interrupt();
        }
        videoGLHandlerThread = null;
        videoGLHander = null;
        return true;
    }
}
@TargetApi(Build.VERSION_CODES.KITKAT)
@Override
public void reSetVideoBitrate(int bitrate) {
    synchronized (syncOp) {
        if (videoGLHander != null) {
            // The GL thread applies the bitrate to the running encoder (KitKat+);
            // the format is updated too so future encoder restarts pick it up.
            videoGLHander.sendMessage(videoGLHander.obtainMessage(VideoGLHandler.WHAT_RESET_BITRATE, bitrate, 0));
            resCoreParameters.mediacdoecAVCBitRate = bitrate;
            dstVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, resCoreParameters.mediacdoecAVCBitRate);
        }
    }
}
@TargetApi(Build.VERSION_CODES.KITKAT)
@Override
public int getVideoBitrate() {
    // Currently configured AVC bitrate, in bits per second.
    synchronized (syncOp) {
        return resCoreParameters.mediacdoecAVCBitRate;
    }
}
@Override
public void reSetVideoFPS(int fps) {
    synchronized (syncOp) {
        // NOTE(review): fps is assumed to be in (0, 1000]; 1000 / fps is integer
        // division and throws ArithmeticException for fps == 0 — confirm callers validate.
        resCoreParameters.videoFPS = fps;
        loopingInterval = 1000 / resCoreParameters.videoFPS;
    }
}
@Override
public void reSetVideoSize(RESCoreParameters newParameters) {
    synchronized (syncOp) {
        synchronized (syncIsLooping) {
            // Only meaningful while the draw loop is alive; otherwise the
            // request is silently dropped.
            if (isPreviewing || isStreaming) {
                videoGLHander.sendMessage(videoGLHander.obtainMessage(VideoGLHandler.WHAT_RESET_VIDEO, newParameters));
            }
        }
    }
}
@Override
public void setCurrentCamera(int cameraIndex) {
    // NOTE(review): mCameraId is not declared in this part of the class —
    // presumably a field further down; confirm it exists and is kept in sync
    // with the GL handler's camera index.
    mCameraId = cameraIndex;
    synchronized (syncOp) {
        if (videoGLHander != null) {
            videoGLHander.updateCameraIndex(cameraIndex);
        }
    }
}
/**
 * Lock and return the current filter (may be null). Callers MUST call
 * {@link #releaseVideoFilter()} when done; the lock is held until then.
 */
public BaseHardVideoFilter acquireVideoFilter() {
    lockVideoFilter.lock();
    return videoFilter;
}
/** Release the filter lock taken by {@link #acquireVideoFilter()}. */
public void releaseVideoFilter() {
    lockVideoFilter.unlock();
}
/** Atomically swap in a new hard video filter; {@code null} clears it. */
public void setVideoFilter(BaseHardVideoFilter baseHardVideoFilter) {
    lockVideoFilter.lock();
    try {
        videoFilter = baseHardVideoFilter;
    } finally {
        lockVideoFilter.unlock();
    }
}
@Override
public void takeScreenShot(RESScreenShotListener listener) {
    // One-shot request: the GL thread consumes (and nulls) the listener on
    // the next rendered frame, inside checkScreenShot().
    synchronized (syncResScreenShotListener) {
        resScreenShotListener = listener;
    }
}
/**
 * Configure horizontal mirroring. isEnableMirror is the master switch; the
 * other two flags choose whether preview and/or stream output are flipped.
 */
public void setMirror(boolean isEnableMirror,boolean isEnablePreviewMirror,boolean isEnableStreamMirror) {
    this.isEnableMirror = isEnableMirror;
    this.isEnablePreviewMirror = isEnablePreviewMirror;
    this.isEnableStreamMirror = isEnableStreamMirror;
}
@Override
public void setVideoChangeListener(RESVideoChangeListener listener) {
    // Notified (via CallbackDelivery) after a WHAT_RESET_VIDEO completes.
    synchronized (syncResVideoChangeListener) {
        resVideoChangeListener = listener;
    }
}
/** @return the measured draw loop fps, or 0 before prepare()/after destroy(). */
@Override
public float getDrawFrameRate() {
    synchronized (syncOp) {
        if (videoGLHander == null) {
            return 0;
        }
        return videoGLHander.getDrawFrameRate();
    }
}
private class VideoGLHandler extends Handler {
// Message codes handled by handleMessage().
static final int WHAT_INIT = 0x001;
static final int WHAT_UNINIT = 0x002;
static final int WHAT_FRAME = 0x003;
static final int WHAT_DRAW = 0x004;
static final int WHAT_RESET_VIDEO = 0x005;
static final int WHAT_START_PREVIEW = 0x010;
static final int WHAT_STOP_PREVIEW = 0x020;
static final int WHAT_START_STREAMING = 0x100;
static final int WHAT_STOP_STREAMING = 0x200;
static final int WHAT_RESET_BITRATE = 0x300;
private Size screenSize; // preview surface size in pixels
//=========================
public static final int FILTER_LOCK_TOLERATION = 3;//3ms
private final Object syncFrameNum = new Object();
private int frameNum = 0; // camera frames latched but not yet consumed
//gl stuff
private final Object syncCameraTex = new Object();
private SurfaceTexture cameraTexture; // camera frames arrive here
private SurfaceTexture screenTexture; // preview output surface
private MediaCodecGLWapper mediaCodecGLWapper; // EGL wrapper around the encoder input Surface
private ScreenGLWapper screenGLWapper; // EGL wrapper around the preview Surface
private OffScreenGLWapper offScreenGLWapper; // shared off-screen EGL context
private int sample2DFrameBuffer; // FBO holding the camera image sampled to 2D
private int sample2DFrameBufferTexture;
private int frameBuffer; // FBO holding the (possibly filtered) image
private int frameBufferTexture;
private FloatBuffer shapeVerticesBuffer;
private FloatBuffer mediaCodecTextureVerticesBuffer;
private FloatBuffer screenTextureVerticesBuffer;
private int currCamera; // current camera index
private final Object syncCameraTextureVerticesBuffer = new Object();
private FloatBuffer camera2dTextureVerticesBuffer;
private FloatBuffer cameraTextureVerticesBuffer;
private ShortBuffer drawIndecesBuffer;
private BaseHardVideoFilter innerVideoFilter = null; // filter instance owned by this GL thread
private RESFrameRateMeter drawFrameRateMeter;
private int directionFlag;
//sender
private VideoSenderThread videoSenderThread;
boolean hasNewFrame = false; // set by WHAT_FRAME, consumed by WHAT_DRAW
public boolean dropNextFrame = false;
float[] textureMatrix; // SurfaceTexture transform, refreshed every frame
public VideoGLHandler(Looper looper) {
    super(looper);
    screenGLWapper = null;
    mediaCodecGLWapper = null;
    drawFrameRateMeter = new RESFrameRateMeter();
    screenSize = new Size(1, 1);
    initBuffer();
}
/**
 * GL-thread state machine: all rendering, encoder lifecycle and screenshot
 * work is serialized through this handler.
 */
@Override
public void handleMessage(Message msg) {
    switch (msg.what) {
        case WHAT_FRAME: {
            // Latch every pending camera frame into the camera SurfaceTexture
            // (honouring dropNextFrame), then sample it into the 2D framebuffer.
            GLHelper.makeCurrent(offScreenGLWapper);
            synchronized (syncFrameNum) {
                synchronized (syncCameraTex) {
                    if (cameraTexture != null) {
                        while (frameNum != 0) {
                            cameraTexture.updateTexImage();
                            --frameNum;
                            if (!dropNextFrame) {
                                hasNewFrame = true;
                            } else {
                                dropNextFrame = false;
                                hasNewFrame = false;
                            }
                        }
                    } else {
                        break;
                    }
                }
            }
            drawSample2DFrameBuffer(cameraTexture);
        }
        break;
        case WHAT_DRAW: {
            // Self-scheduling draw tick. msg.obj is the uptime (ms) this tick
            // was due; schedule the next tick, then render if a frame is latched.
            long time = (Long) msg.obj;
            long interval = time + loopingInterval - SystemClock.uptimeMillis();
            synchronized (syncIsLooping) {
                if (isPreviewing || isStreaming) {
                    if (interval > 0) {
                        videoGLHander.sendMessageDelayed(videoGLHander.obtainMessage(
                                        VideoGLHandler.WHAT_DRAW,
                                        SystemClock.uptimeMillis() + interval),
                                interval);
                    } else {
                        videoGLHander.sendMessage(videoGLHander.obtainMessage(
                                VideoGLHandler.WHAT_DRAW,
                                SystemClock.uptimeMillis() + loopingInterval));
                    }
                }
            }
            if (hasNewFrame) {
                drawFrameBuffer();
                // uptime ms -> ns for the encoder presentation timestamp.
                drawMediaCodec(time * 1000000);
                drawScreen();
                encoderMp4(frameBufferTexture); // encode MP4
                drawFrameRateMeter.count();
                hasNewFrame = false;
            }
        }
        break;
        case WHAT_INIT: {
            initOffScreenGL();
        }
        break;
        case WHAT_UNINIT: {
            // Destroy the user filter under the filter lock, then the GL context.
            lockVideoFilter.lock();
            if (innerVideoFilter != null) {
                innerVideoFilter.onDestroy();
                innerVideoFilter = null;
            }
            lockVideoFilter.unlock();
            uninitOffScreenGL();
        }
        break;
        case WHAT_START_PREVIEW: {
            initScreenGL((SurfaceTexture) msg.obj);
            updatePreview(msg.arg1, msg.arg2);
        }
        break;
        case WHAT_STOP_PREVIEW: {
            uninitScreenGL();
            boolean releaseTexture = (boolean) msg.obj;
            if (releaseTexture) {
                screenTexture.release();
                screenTexture = null;
            }
        }
        break;
        case WHAT_START_STREAMING: {
            // Lazily create the AVC encoder, wire its input Surface into a GL
            // context, and start the thread that drains encoded output.
            if (dstVideoEncoder == null) {
                dstVideoEncoder = MediaCodecHelper.createHardVideoMediaCodec(resCoreParameters, dstVideoFormat);
                if (dstVideoEncoder == null) {
                    throw new RuntimeException("create Video MediaCodec failed");
                }
            }
            dstVideoEncoder.configure(dstVideoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            initMediaCodecGL(dstVideoEncoder.createInputSurface());
            dstVideoEncoder.start();
            videoSenderThread = new VideoSenderThread("VideoSenderThread", dstVideoEncoder, (RESFlvDataCollecter) msg.obj);
            videoSenderThread.start();
        }
        break;
        case WHAT_STOP_STREAMING: {
            // Stop the sender first, then tear down the encoder surface & codec.
            videoSenderThread.quit();
            try {
                videoSenderThread.join();
            } catch (InterruptedException e) {
                LogTools.trace("RESHardVideoCore,stopStreaming()failed", e);
            }
            videoSenderThread = null;
            uninitMediaCodecGL();
            dstVideoEncoder.stop();
            dstVideoEncoder.release();
            dstVideoEncoder = null;
        }
        break;
        case WHAT_RESET_BITRATE: {
            // Live bitrate change (KitKat+) via MediaCodec.setParameters().
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT && mediaCodecGLWapper != null) {
                Bundle bitrateBundle = new Bundle();
                bitrateBundle.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, msg.arg1);
                dstVideoEncoder.setParameters(bitrateBundle);
            }
        }
        break;
        case WHAT_RESET_VIDEO: {
            // Apply a new video size: rebuild framebuffers and, if streaming,
            // restart the encoder with the new format, then notify the listener.
            RESCoreParameters newParameters = (RESCoreParameters) msg.obj;
            resCoreParameters.videoWidth = newParameters.videoWidth;
            resCoreParameters.videoHeight = newParameters.videoHeight;
            resCoreParameters.cropRatio = newParameters.cropRatio;
            updateCameraIndex(currCamera);
            resetFrameBuff();
            if (mediaCodecGLWapper != null) {
                uninitMediaCodecGL();
                dstVideoEncoder.stop();
                dstVideoEncoder.release();
                dstVideoEncoder = MediaCodecHelper.createHardVideoMediaCodec(resCoreParameters, dstVideoFormat);
                if (dstVideoEncoder == null) {
                    throw new RuntimeException("create Video MediaCodec failed");
                }
                dstVideoEncoder.configure(dstVideoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                initMediaCodecGL(dstVideoEncoder.createInputSurface());
                dstVideoEncoder.start();
                videoSenderThread.updateMediaCodec(dstVideoEncoder);
            }
            synchronized (syncResVideoChangeListener) {
                if (resVideoChangeListener != null) {
                    CallbackDelivery.i().post(new RESVideoChangeListener.RESVideoChangeRunable(resVideoChangeListener,
                            resCoreParameters.videoWidth,
                            resCoreParameters.videoHeight));
                }
            }
        }
        break;
        default:
    }
}
/**
 * Sample the external-OES camera texture into sample2DFrameBuffer as a plain
 * 2D texture, applying the SurfaceTexture transform matrix and, when enabled,
 * the preview/stream mirror flips.
 */
private void drawSample2DFrameBuffer(SurfaceTexture cameraTexture) {
    if(isEnableMirror){
        screenTextureVerticesBuffer = GLHelper.adjustTextureFlip(isEnablePreviewMirror);
        mediaCodecTextureVerticesBuffer = GLHelper.adjustTextureFlip(isEnableStreamMirror);
    }
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, sample2DFrameBuffer);
    GLES20.glUseProgram(offScreenGLWapper.cam2dProgram);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    // NOTE(review): OVERWATCH_TEXTURE_ID is not declared in this chunk —
    // presumably the constant camera OES texture id; confirm.
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, OVERWATCH_TEXTURE_ID);
    GLES20.glUniform1i(offScreenGLWapper.cam2dTextureLoc, 0);
    synchronized (syncCameraTextureVerticesBuffer) {
        GLHelper.enableVertex(offScreenGLWapper.cam2dPostionLoc, offScreenGLWapper.cam2dTextureCoordLoc,
                shapeVerticesBuffer, camera2dTextureVerticesBuffer);
    }
    textureMatrix = new float[16];
    cameraTexture.getTransformMatrix(textureMatrix);
    //encoder mp4 start
    //processStMatrix(textureMatrix, mCameraID == Camera.CameraInfo.CAMERA_FACING_FRONT);
    //encoder mp4 end
    GLES20.glUniformMatrix4fv(offScreenGLWapper.cam2dTextureMatrix, 1, false, textureMatrix, 0);
    // Width/height deliberately swapped: the buffer is portrait-oriented.
    GLES20.glViewport(0, 0, resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);//resCoreParameters.videoWidth, resCoreParameters.videoHeight
    doGLDraw();
    GLES20.glFinish();
    GLHelper.disableVertex(offScreenGLWapper.cam2dPostionLoc, offScreenGLWapper.cam2dTextureCoordLoc);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
    GLES20.glUseProgram(0);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
/**
 * Copy the unfiltered sample2D texture into frameBuffer (used when no filter
 * is set or the filter lock could not be taken in time).
 */
private void drawOriginFrameBuffer() {
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer);
    GLES20.glUseProgram(offScreenGLWapper.camProgram);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, sample2DFrameBufferTexture);
    GLES20.glUniform1i(offScreenGLWapper.camTextureLoc, 0);
    synchronized (syncCameraTextureVerticesBuffer) {
        GLHelper.enableVertex(offScreenGLWapper.camPostionLoc, offScreenGLWapper.camTextureCoordLoc,
                shapeVerticesBuffer, cameraTextureVerticesBuffer);
    }
    GLES20.glViewport(0, 0, resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);
    doGLDraw();
    GLES20.glFinish();
    GLHelper.disableVertex(offScreenGLWapper.camPostionLoc, offScreenGLWapper.camTextureCoordLoc);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
    GLES20.glUseProgram(0);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
/**
 * Render sample2DFrameBuffer into frameBuffer through the user filter (if one
 * is set and its lock can be taken within FILTER_LOCK_TOLERATION ms),
 * otherwise unfiltered. Also services any pending screenshot request.
 */
private void drawFrameBuffer() {
    GLHelper.makeCurrent(offScreenGLWapper);
    boolean isFilterLocked = lockVideoFilter();
    long starttime = System.currentTimeMillis();
    if (isFilterLocked) {
        // Swap in a newly-set filter: destroy the old one, init the new one.
        if (videoFilter != innerVideoFilter) {
            if (innerVideoFilter != null) {
                innerVideoFilter.onDestroy();
            }
            innerVideoFilter = videoFilter;
            if (innerVideoFilter != null) {
                innerVideoFilter.onInit(resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);//resCoreParameters.videoWidth, resCoreParameters.videoHeight
            }
        }
        if (innerVideoFilter != null) {
            synchronized (syncCameraTextureVerticesBuffer) {
                innerVideoFilter.onDirectionUpdate(directionFlag);
                innerVideoFilter.onDraw(sample2DFrameBufferTexture, frameBuffer, shapeVerticesBuffer, cameraTextureVerticesBuffer);
            }
        } else {
            drawOriginFrameBuffer();
        }
        unlockVideoFilter();
    } else {
        drawOriginFrameBuffer();
    }
    // NOTE(review): per-frame filter timing logged at error level — consider lowering.
    LogTools.e("滤镜耗时:"+(System.currentTimeMillis()-starttime));
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer);
    checkScreenShot();
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
/**
 * Draw frameBufferTexture onto the encoder's input Surface and present it
 * with the given timestamp (nanoseconds). No-op while not streaming.
 */
private void drawMediaCodec(long currTime) {
    if (mediaCodecGLWapper != null) {
        GLHelper.makeCurrent(mediaCodecGLWapper);
        GLES20.glUseProgram(mediaCodecGLWapper.drawProgram);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameBufferTexture);
        GLES20.glUniform1i(mediaCodecGLWapper.drawTextureLoc, 0);
        GLHelper.enableVertex(mediaCodecGLWapper.drawPostionLoc, mediaCodecGLWapper.drawTextureCoordLoc,
                shapeVerticesBuffer, mediaCodecTextureVerticesBuffer);
        doGLDraw();
        GLES20.glFinish();
        GLHelper.disableVertex(mediaCodecGLWapper.drawPostionLoc, mediaCodecGLWapper.drawTextureCoordLoc);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        GLES20.glUseProgram(0);
        // Stamp the frame's presentation time before swapping into the encoder.
        EGLExt.eglPresentationTimeANDROID(mediaCodecGLWapper.eglDisplay, mediaCodecGLWapper.eglSurface, currTime);
        if (!EGL14.eglSwapBuffers(mediaCodecGLWapper.eglDisplay, mediaCodecGLWapper.eglSurface)) {
            throw new RuntimeException("eglSwapBuffers,failed!");
        }
    }
}
/**
 * Draw frameBufferTexture onto the preview Surface. No-op while no preview
 * surface is attached.
 */
private void drawScreen() {
    if (screenGLWapper != null) {
        GLHelper.makeCurrent(screenGLWapper);
        GLES20.glUseProgram(screenGLWapper.drawProgram);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameBufferTexture);
        GLES20.glUniform1i(screenGLWapper.drawTextureLoc, 0);
        GLHelper.enableVertex(screenGLWapper.drawPostionLoc, screenGLWapper.drawTextureCoordLoc,
                shapeVerticesBuffer, screenTextureVerticesBuffer);
        GLES20.glViewport(0, 0, screenSize.getWidth(), screenSize.getHeight());
        doGLDraw();
        GLES20.glFinish();
        GLHelper.disableVertex(screenGLWapper.drawPostionLoc, screenGLWapper.drawTextureCoordLoc);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        GLES20.glUseProgram(0);
        if (!EGL14.eglSwapBuffers(screenGLWapper.eglDisplay, screenGLWapper.eglSurface)) {
            throw new RuntimeException("eglSwapBuffers,failed!");
        }
    }
}
/** Clear to transparent black and draw the indexed two-triangle quad. */
private void doGLDraw() {
    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer);
}
/**
 * Try to take the video-filter lock within FILTER_LOCK_TOLERATION ms.
 *
 * @return true if the lock was acquired; false on timeout or interruption
 */
private boolean lockVideoFilter() {
    try {
        return lockVideoFilter.tryLock(FILTER_LOCK_TOLERATION, TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
        // BUGFIX: preserve the interrupt status instead of swallowing it.
        Thread.currentThread().interrupt();
        return false;
    }
}
/** Release the filter lock taken by lockVideoFilter(). */
private void unlockVideoFilter() {
    lockVideoFilter.unlock();
}
/**
 * If a screenshot was requested, read back the currently-bound framebuffer
 * (the caller binds frameBuffer first), convert the GL pixels to an ARGB
 * Bitmap, and deliver it on the callback thread. The listener is one-shot
 * and is always cleared, even on failure (result may be null).
 */
private void checkScreenShot() {
    synchronized (syncResScreenShotListener) {
        if (resScreenShotListener != null) {
            Bitmap result = null;
            try {
                IntBuffer pixBuffer = IntBuffer.allocate(resCoreParameters.previewVideoHeight * resCoreParameters.previewVideoWidth);
                GLES20.glReadPixels(0, 0, resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixBuffer);
                int[] glPixel = pixBuffer.array();
                int[] argbPixel = new int[resCoreParameters.previewVideoHeight * resCoreParameters.previewVideoWidth];
                // Presumably converts GL bottom-up RGBA to top-down ARGB — confirm in ColorHelper.
                ColorHelper.FIXGLPIXEL(glPixel, argbPixel,
                        resCoreParameters.previewVideoHeight,
                        resCoreParameters.previewVideoWidth
                );
                result = Bitmap.createBitmap(argbPixel,
                        resCoreParameters.previewVideoHeight,
                        resCoreParameters.previewVideoWidth,
                        Bitmap.Config.ARGB_8888);
                if(isEnableMirror && isEnablePreviewMirror){
                    Matrix mx = new Matrix();
                    mx.setScale(-1, 1); // mirror horizontally
                    result = Bitmap.createBitmap(result,0,0,result.getWidth(),result.getHeight(),mx,true);
                }
                // NOTE(review): leftover debug prints — consider removing.
                System.out.println("resCoreParameters.previewVideoWidth = " + resCoreParameters.previewVideoWidth);
                System.out.println("resCoreParameters.previewVideoHeight = " + resCoreParameters.previewVideoHeight);
            } catch (Exception e) {
                LogTools.trace("takescreenshot failed:", e);
            } finally {
                CallbackDelivery.i().post(new RESScreenShotListener.RESScreenShotListenerRunable(resScreenShotListener, result));
                resScreenShotListener = null;
            }
        }
    }
}
/**
 * Create the shared off-screen EGL context, compile the camera (2D) and
 * camera-OES shader programs, resolve their locations, and allocate the two
 * working framebuffers. Must be called exactly once before any drawing.
 */
private void initOffScreenGL() {
    if (offScreenGLWapper == null) {
        offScreenGLWapper = new OffScreenGLWapper();
        GLHelper.initOffScreenGL(offScreenGLWapper);
        GLHelper.makeCurrent(offScreenGLWapper);
        //camera
        offScreenGLWapper.camProgram = GLHelper.createCameraProgram();
        GLES20.glUseProgram(offScreenGLWapper.camProgram);
        offScreenGLWapper.camTextureLoc = GLES20.glGetUniformLocation(offScreenGLWapper.camProgram, "uTexture");
        offScreenGLWapper.camPostionLoc = GLES20.glGetAttribLocation(offScreenGLWapper.camProgram, "aPosition");
        offScreenGLWapper.camTextureCoordLoc = GLES20.glGetAttribLocation(offScreenGLWapper.camProgram, "aTextureCoord");
        //camera2d
        offScreenGLWapper.cam2dProgram = GLHelper.createCamera2DProgram();
        GLES20.glUseProgram(offScreenGLWapper.cam2dProgram);
        offScreenGLWapper.cam2dTextureLoc = GLES20.glGetUniformLocation(offScreenGLWapper.cam2dProgram, "uTexture");
        offScreenGLWapper.cam2dPostionLoc = GLES20.glGetAttribLocation(offScreenGLWapper.cam2dProgram, "aPosition");
        offScreenGLWapper.cam2dTextureCoordLoc = GLES20.glGetAttribLocation(offScreenGLWapper.cam2dProgram, "aTextureCoord");
        offScreenGLWapper.cam2dTextureMatrix = GLES20.glGetUniformLocation(offScreenGLWapper.cam2dProgram, "uTextureMatrix");
        int[] fb = new int[1], fbt = new int[1];
        // Both framebuffers are allocated portrait-oriented (height x width).
        GLHelper.createCamFrameBuff(fb, fbt, resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);//resCoreParameters.videoWidth, resCoreParameters.videoHeight
        sample2DFrameBuffer = fb[0];
        sample2DFrameBufferTexture = fbt[0];
        GLHelper.createCamFrameBuff(fb, fbt, resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);//resCoreParameters.videoWidth, resCoreParameters.videoHeight
        frameBuffer = fb[0];
        frameBufferTexture = fbt[0];
    } else {
        throw new IllegalStateException("initOffScreenGL without uninitOffScreenGL");
    }
}
/**
 * Destroy the off-screen GL programs, framebuffers and EGL context created by
 * initOffScreenGL().
 */
private void uninitOffScreenGL() {
    if (offScreenGLWapper != null) {
        GLHelper.makeCurrent(offScreenGLWapper);
        GLES20.glDeleteProgram(offScreenGLWapper.camProgram);
        GLES20.glDeleteProgram(offScreenGLWapper.cam2dProgram);
        GLES20.glDeleteFramebuffers(1, new int[]{frameBuffer}, 0);
        GLES20.glDeleteTextures(1, new int[]{frameBufferTexture}, 0);
        GLES20.glDeleteFramebuffers(1, new int[]{sample2DFrameBuffer}, 0);
        GLES20.glDeleteTextures(1, new int[]{sample2DFrameBufferTexture}, 0);
        // BUGFIX: release the current binding BEFORE destroying the surface/
        // context and terminating the display. The original called
        // eglMakeCurrent() after eglTerminate(), on an already-invalid display.
        EGL14.eglMakeCurrent(offScreenGLWapper.eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
        EGL14.eglDestroySurface(offScreenGLWapper.eglDisplay, offScreenGLWapper.eglSurface);
        EGL14.eglDestroyContext(offScreenGLWapper.eglDisplay, offScreenGLWapper.eglContext);
        EGL14.eglTerminate(offScreenGLWapper.eglDisplay);
    } else {
        throw new IllegalStateException("uninitOffScreenGL without initOffScreenGL");
    }
}
    /**
     * Creates the on-screen (preview) EGL surface/context and compiles the
     * program that draws the shared frame-buffer texture to the screen. The
     * context shares objects with the off-screen context so textures created
     * there are usable here.
     *
     * @param screenSurfaceTexture surface texture backing the preview view
     * @throws IllegalStateException if called while already initialized
     */
    private void initScreenGL(SurfaceTexture screenSurfaceTexture) {
        if (screenGLWapper == null) {
            screenTexture = screenSurfaceTexture;
            screenGLWapper = new ScreenGLWapper();
            // Share the off-screen eglContext so its textures can be sampled here.
            GLHelper.initScreenGL(screenGLWapper, offScreenGLWapper.eglContext, screenSurfaceTexture);
            GLHelper.makeCurrent(screenGLWapper);
            screenGLWapper.drawProgram = GLHelper.createScreenProgram();
            GLES20.glUseProgram(screenGLWapper.drawProgram);
            // Cache attribute/uniform locations once; reused every frame.
            screenGLWapper.drawTextureLoc = GLES20.glGetUniformLocation(screenGLWapper.drawProgram, "uTexture");
            screenGLWapper.drawPostionLoc = GLES20.glGetAttribLocation(screenGLWapper.drawProgram, "aPosition");
            screenGLWapper.drawTextureCoordLoc = GLES20.glGetAttribLocation(screenGLWapper.drawProgram, "aTextureCoord");
        } else {
            throw new IllegalStateException("initScreenGL without unInitScreenGL");
        }
    }
private void uninitScreenGL() {
if (screenGLWapper != null) {
GLHelper.makeCurrent(screenGLWapper);
GLES20.glDeleteProgram(screenGLWapper.drawProgram);
EGL14.eglDestroySurface(screenGLWapper.eglDisplay, screenGLWapper.eglSurface);
EGL14.eglDestroyContext(screenGLWapper.eglDisplay, screenGLWapper.eglContext);
EGL14.eglTerminate(screenGLWapper.eglDisplay);
EGL14.eglMakeCurrent(screenGLWapper.eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
screenGLWapper = null;
} else {
throw new IllegalStateException("unInitScreenGL without initScreenGL");
}
}
    /**
     * Creates the EGL surface/context that renders into the video encoder's
     * input Surface and compiles the draw program for it. Shares the
     * off-screen context so the frame-buffer texture can be drawn here.
     *
     * @param mediacodecSurface input surface obtained from the MediaCodec encoder
     * @throws IllegalStateException if called while already initialized
     */
    private void initMediaCodecGL(Surface mediacodecSurface) {
        if (mediaCodecGLWapper == null) {
            mediaCodecGLWapper = new MediaCodecGLWapper();
            GLHelper.initMediaCodecGL(mediaCodecGLWapper, offScreenGLWapper.eglContext, mediacodecSurface);
            GLHelper.makeCurrent(mediaCodecGLWapper);
            GLES20.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
            mediaCodecGLWapper.drawProgram = GLHelper.createMediaCodecProgram();
            GLES20.glUseProgram(mediaCodecGLWapper.drawProgram);
            // Cache attribute/uniform locations once; reused every frame.
            mediaCodecGLWapper.drawTextureLoc = GLES20.glGetUniformLocation(mediaCodecGLWapper.drawProgram, "uTexture");
            mediaCodecGLWapper.drawPostionLoc = GLES20.glGetAttribLocation(mediaCodecGLWapper.drawProgram, "aPosition");
            mediaCodecGLWapper.drawTextureCoordLoc = GLES20.glGetAttribLocation(mediaCodecGLWapper.drawProgram, "aTextureCoord");
        } else {
            throw new IllegalStateException("initMediaCodecGL without uninitMediaCodecGL");
        }
    }
private void uninitMediaCodecGL() {
if (mediaCodecGLWapper != null) {
GLHelper.makeCurrent(mediaCodecGLWapper);
GLES20.glDeleteProgram(mediaCodecGLWapper.drawProgram);
EGL14.eglDestroySurface(mediaCodecGLWapper.eglDisplay, mediaCodecGLWapper.eglSurface);
EGL14.eglDestroyContext(mediaCodecGLWapper.eglDisplay, mediaCodecGLWapper.eglContext);
EGL14.eglTerminate(mediaCodecGLWapper.eglDisplay);
EGL14.eglMakeCurrent(mediaCodecGLWapper.eglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
mediaCodecGLWapper = null;
} else {
throw new IllegalStateException("uninitMediaCodecGL without initMediaCodecGL");
}
}
    /**
     * Deletes and recreates both off-screen frame buffers (sample2D + main)
     * after a video-size change.
     * NOTE(review): these are recreated with (videoWidth, videoHeight) while
     * initOffScreenGL() created them with (previewVideoHeight,
     * previewVideoWidth) — the argument order differs; confirm which
     * orientation is intended.
     */
    private void resetFrameBuff() {
        GLHelper.makeCurrent(offScreenGLWapper);
        GLES20.glDeleteFramebuffers(1, new int[]{frameBuffer}, 0);
        GLES20.glDeleteTextures(1, new int[]{frameBufferTexture}, 0);
        GLES20.glDeleteFramebuffers(1, new int[]{sample2DFrameBuffer}, 0);
        GLES20.glDeleteTextures(1, new int[]{sample2DFrameBufferTexture}, 0);
        int[] fb = new int[1], fbt = new int[1];
        GLHelper.createCamFrameBuff(fb, fbt, resCoreParameters.videoWidth, resCoreParameters.videoHeight);
        sample2DFrameBuffer = fb[0];
        sample2DFrameBufferTexture = fbt[0];
        GLHelper.createCamFrameBuff(fb, fbt, resCoreParameters.videoWidth, resCoreParameters.videoHeight);
        frameBuffer = fb[0];
        frameBufferTexture = fbt[0];
    }
private void initBuffer() {
shapeVerticesBuffer = GLHelper.getShapeVerticesBuffer();
mediaCodecTextureVerticesBuffer = GLHelper.getMediaCodecTextureVerticesBuffer();
screenTextureVerticesBuffer = GLHelper.getScreenTextureVerticesBuffer();
updateCameraIndex(currCamera);
drawIndecesBuffer = GLHelper.getDrawIndecesBuffer();
cameraTextureVerticesBuffer = GLHelper.getCameraTextureVerticesBuffer();
}
    /**
     * Switches the active camera index and rebuilds the camera 2D
     * texture-coordinate buffer for the matching direction/crop.
     * NOTE(review): the direction flags are combined with XOR against the
     * configured direction mode; confirm XOR (toggle) rather than OR is
     * intended, particularly for FLAG_DIRECTION_ROATATION_0.
     *
     * @param cameraIndex Camera.CameraInfo.CAMERA_FACING_FRONT or _BACK
     */
    public void updateCameraIndex(int cameraIndex) {
        synchronized (syncCameraTextureVerticesBuffer) {
            currCamera = cameraIndex;
            if (currCamera == Camera.CameraInfo.CAMERA_FACING_FRONT) {
                // Front camera output is mirrored, so toggle a horizontal flip.
                directionFlag = resCoreParameters.frontCameraDirectionMode ^ RESConfig.DirectionMode.FLAG_DIRECTION_FLIP_HORIZONTAL;
            } else {
                directionFlag = resCoreParameters.backCameraDirectionMode ^ RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_0;
            }
            camera2dTextureVerticesBuffer = GLHelper.getCamera2DTextureVerticesBuffer(directionFlag, resCoreParameters.cropRatio);
        }
    }
    /** @return frames-per-second currently achieved by the draw loop. */
    public float getDrawFrameRate() {
        return drawFrameRateMeter.getFps();
    }
public void updateCamTexture(SurfaceTexture surfaceTexture) {
synchronized (syncCameraTex) {
if (surfaceTexture != cameraTexture) {
cameraTexture = surfaceTexture;
frameNum = 0;
dropNextFrame = true;
}
}
}
    /**
     * Called when the camera delivers a new frame: bumps the pending-frame
     * counter and posts WHAT_FRAME at the front of the handler queue so frame
     * processing preempts other pending work.
     */
    public void addFrameNum() {
        synchronized (syncFrameNum) {
            ++frameNum;
            this.removeMessages(WHAT_FRAME);
            this.sendMessageAtFrontOfQueue(this.obtainMessage(VideoGLHandler.WHAT_FRAME));
        }
    }
    /** Records the new preview (visual) size; used by the next screen draw. */
    public void updatePreview(int w, int h) {
        screenSize = new Size(w, h);
    }
    /** @return the off-screen frame buffer's texture id (fed to the MP4 encoder). */
    public int getBufferTexture(){
        return frameBufferTexture;
    }
    /**
     * Feeds the current frame to the MP4 recorder encoder, if one is attached.
     * Mirrors the surface-texture matrix for the front camera and re-binds the
     * encoder's EGL context when it was flagged stale.
     * NOTE(review): the BufferTexture parameter is unused in this body — the
     * encoder is given videoGLHander.getBufferTexture() instead; confirm the
     * parameter can be removed.
     * NOTE(review): setPreviewWH is called with (previewVideoHeight,
     * previewVideoWidth) — presumably intentional for a rotated preview; verify.
     */
    private void encoderMp4(int BufferTexture) {
        synchronized (this) {
            if (mVideoEncoder != null) {
                processStMatrix(textureMatrix, mCameraId == Camera.CameraInfo.CAMERA_FACING_FRONT);
                if (mNeedResetEglContext) {
                    mVideoEncoder.setEglContext(EGL14.eglGetCurrentContext(), videoGLHander.getBufferTexture());
                    mNeedResetEglContext = false;
                }
                mVideoEncoder.setPreviewWH(resCoreParameters.previewVideoHeight, resCoreParameters.previewVideoWidth);
                mVideoEncoder.frameAvailableSoon(textureMatrix, mVideoEncoder.getMvpMatrix());
            }
        }
    }
}
//encoder mp4 start
private MediaVideoEncoder mVideoEncoder;
private boolean mNeedResetEglContext = true;
private int mCameraId = -1;
    /**
     * Attaches (or detaches, with null) the MP4 recorder's video encoder and
     * immediately hands it the current EGL context plus the frame-buffer texture.
     */
    public void setVideoEncoder(final MediaVideoEncoder encoder) {
        synchronized (this) {
            if (encoder != null) {
                encoder.setEglContext(EGL14.eglGetCurrentContext(), videoGLHander.getBufferTexture());
            }
            mVideoEncoder = encoder;
        }
    }
private void processStMatrix(float[] matrix, boolean needMirror) {
if (needMirror && matrix != null && matrix.length == 16) {
for (int i = 0; i < 3; i++) {
matrix[4 * i] = -matrix[4 * i];
}
if (matrix[4 * 3] == 0) {
matrix[4 * 3] = 1.0f;
} else if (matrix[4 * 3] == 1.0f) {
matrix[4 * 3] = 0f;
}
}
return;
}
    /** Requests that the MP4 encoder's EGL context be re-bound on the next frame. */
    public void setNeedResetEglContext(boolean bol){
        mNeedResetEglContext = bol;
    }
//encoder mp4 end
}

View File

@@ -0,0 +1,232 @@
package me.lake.librestreaming.core;
import android.media.AudioFormat;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.SystemClock;
import java.nio.ByteBuffer;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import me.lake.librestreaming.filter.softaudiofilter.BaseSoftAudioFilter;
import me.lake.librestreaming.model.RESAudioBuff;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.tools.LogTools;
/**
 * Software audio core: accepts raw PCM frames from the audio recorder via
 * {@link #queueAudio(byte[])}, optionally runs them through a
 * {@link BaseSoftAudioFilter}, then pushes the result into an AAC
 * {@link MediaCodec} whose output is drained by an {@link AudioSenderThread}.
 * Lifecycle methods are serialized on {@code syncOp}; the filter reference is
 * guarded by {@code lockAudioFilter}.
 */
public class RESSoftAudioCore {
    RESCoreParameters resCoreParameters;
    private final Object syncOp = new Object();
    private MediaCodec dstAudioEncoder;
    private MediaFormat dstAudioFormat;
    //filter
    private Lock lockAudioFilter = null;
    private BaseSoftAudioFilter audioFilter;
    //AudioBuffs
    //buffers to handle buff from queueAudio
    private RESAudioBuff[] orignAudioBuffs;
    private int lastAudioQueueBuffIndex;
    //buffer to handle buff from orignAudioBuffs
    private RESAudioBuff orignAudioBuff;
    private RESAudioBuff filteredAudioBuff;
    private AudioFilterHandler audioFilterHandler;
    private HandlerThread audioFilterHandlerThread;
    private AudioSenderThread audioSenderThread;

    public RESSoftAudioCore(RESCoreParameters parameters) {
        resCoreParameters = parameters;
        lockAudioFilter = new ReentrantLock(false);
    }

    /**
     * Hands one raw PCM frame to the filter thread. The frame is copied into a
     * ring of pre-allocated buffers; if the next slot is still being processed
     * the frame is dropped rather than blocking the recorder thread.
     */
    public void queueAudio(byte[] rawAudioFrame) {
        int targetIndex = (lastAudioQueueBuffIndex + 1) % orignAudioBuffs.length;
        if (orignAudioBuffs[targetIndex].isReadyToFill) {
            LogTools.d("queueAudio,accept ,targetIndex" + targetIndex);
            System.arraycopy(rawAudioFrame, 0, orignAudioBuffs[targetIndex].buff, 0, resCoreParameters.audioRecoderBufferSize);
            orignAudioBuffs[targetIndex].isReadyToFill = false;
            lastAudioQueueBuffIndex = targetIndex;
            audioFilterHandler.sendMessage(audioFilterHandler.obtainMessage(AudioFilterHandler.WHAT_INCOMING_BUFF, targetIndex, 0));
        } else {
            LogTools.d("queueAudio,abandon,targetIndex" + targetIndex);
        }
    }

    /**
     * Configures the fixed AAC parameters (44.1kHz mono LC @ 32kbps), creates
     * the audio encoder and allocates the PCM buffer ring.
     *
     * @return false if the audio MediaCodec could not be created
     */
    public boolean prepare(RESConfig resConfig) {
        synchronized (syncOp) {
            resCoreParameters.mediacodecAACProfile = MediaCodecInfo.CodecProfileLevel.AACObjectLC;
            resCoreParameters.mediacodecAACSampleRate = 44100;
            resCoreParameters.mediacodecAACChannelCount = 1;
            resCoreParameters.mediacodecAACBitRate = 32 * 1024;
            resCoreParameters.mediacodecAACMaxInputSize = 8820;
            dstAudioFormat = new MediaFormat();
            dstAudioEncoder = MediaCodecHelper.createAudioMediaCodec(resCoreParameters, dstAudioFormat);
            if (dstAudioEncoder == null) {
                LogTools.e("create Audio MediaCodec failed");
                return false;
            }
            //audio
            //44100/10=4410,4410*2 = 8820
            int audioQueueNum = resCoreParameters.audioBufferQueueNum;
            int orignAudioBuffSize = resCoreParameters.mediacodecAACSampleRate / 5;
            orignAudioBuffs = new RESAudioBuff[audioQueueNum];
            for (int i = 0; i < audioQueueNum; i++) {
                orignAudioBuffs[i] = new RESAudioBuff(AudioFormat.ENCODING_PCM_16BIT, orignAudioBuffSize);
            }
            orignAudioBuff = new RESAudioBuff(AudioFormat.ENCODING_PCM_16BIT, orignAudioBuffSize);
            filteredAudioBuff = new RESAudioBuff(AudioFormat.ENCODING_PCM_16BIT, orignAudioBuffSize);
            return true;
        }
    }

    /** Starts the AAC encoder, the filter handler thread and the sender thread. */
    public void start(RESFlvDataCollecter flvDataCollecter) {
        synchronized (syncOp) {
            try {
                for (RESAudioBuff buff : orignAudioBuffs) {
                    buff.isReadyToFill = true;
                }
                if (dstAudioEncoder == null) {
                    dstAudioEncoder = MediaCodec.createEncoderByType(dstAudioFormat.getString(MediaFormat.KEY_MIME));
                }
                dstAudioEncoder.configure(dstAudioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                dstAudioEncoder.start();
                lastAudioQueueBuffIndex = 0;
                audioFilterHandlerThread = new HandlerThread("audioFilterHandlerThread");
                audioSenderThread = new AudioSenderThread("AudioSenderThread", dstAudioEncoder, flvDataCollecter);
                audioFilterHandlerThread.start();
                audioSenderThread.start();
                audioFilterHandler = new AudioFilterHandler(audioFilterHandlerThread.getLooper());
            } catch (Exception e) {
                LogTools.trace("RESSoftAudioCore", e);
            }
        }
    }

    /** Stops filter and sender threads, then stops and releases the encoder. */
    public void stop() {
        synchronized (syncOp) {
            audioFilterHandler.removeCallbacksAndMessages(null);
            audioFilterHandlerThread.quit();
            try {
                audioFilterHandlerThread.join();
                audioSenderThread.quit();
                audioSenderThread.join();
            } catch (InterruptedException e) {
                // Preserve the interrupt status for callers higher up the stack.
                Thread.currentThread().interrupt();
                LogTools.trace("RESSoftAudioCore", e);
            }
            dstAudioEncoder.stop();
            dstAudioEncoder.release();
            dstAudioEncoder = null;
        }
    }

    /**
     * Locks and returns the current audio filter (may be null). Callers MUST
     * pair this with {@link #releaseAudioFilter()}.
     */
    public BaseSoftAudioFilter acquireAudioFilter() {
        lockAudioFilter.lock();
        return audioFilter;
    }

    /** Releases the lock taken by {@link #acquireAudioFilter()}. */
    public void releaseAudioFilter() {
        lockAudioFilter.unlock();
    }

    /** Replaces the audio filter, destroying the old one and initializing the new. */
    public void setAudioFilter(BaseSoftAudioFilter baseSoftAudioFilter) {
        lockAudioFilter.lock();
        if (audioFilter != null) {
            audioFilter.onDestroy();
        }
        audioFilter = baseSoftAudioFilter;
        if (audioFilter != null) {
            // Buffer size is 200ms of mono 16-bit PCM (sampleRate / 5).
            audioFilter.onInit(resCoreParameters.mediacodecAACSampleRate / 5);
        }
        lockAudioFilter.unlock();
    }

    /** Destroys the current filter, if any, and clears the reference. */
    public void destroy() {
        synchronized (syncOp) {
            lockAudioFilter.lock();
            if (audioFilter != null) {
                audioFilter.onDestroy();
                // Clear so a destroyed filter can never be reused or re-destroyed.
                audioFilter = null;
            }
            lockAudioFilter.unlock();
        }
    }

    /**
     * Runs on the filter thread: copies the incoming ring slot, applies the
     * filter when it can be locked within FILTER_LOCK_TOLERATION ms, and
     * queues the result into the AAC encoder.
     */
    private class AudioFilterHandler extends Handler {
        public static final int FILTER_LOCK_TOLERATION = 3;//3ms
        public static final int WHAT_INCOMING_BUFF = 1;
        private int sequenceNum;

        AudioFilterHandler(Looper looper) {
            super(looper);
            sequenceNum = 0;
        }

        @Override
        public void handleMessage(Message msg) {
            if (msg.what != WHAT_INCOMING_BUFF) {
                return;
            }
            sequenceNum++;
            int targetIndex = msg.arg1;
            long nowTimeMs = SystemClock.uptimeMillis();
            // Copy out of the shared ring ASAP, then release the slot so the
            // recorder thread can refill it.
            System.arraycopy(orignAudioBuffs[targetIndex].buff, 0,
                    orignAudioBuff.buff, 0, orignAudioBuff.buff.length);
            orignAudioBuffs[targetIndex].isReadyToFill = true;
            boolean isFilterLocked = lockAudioFilter();
            boolean filtered = false;
            if (isFilterLocked) {
                filtered = audioFilter.onFrame(orignAudioBuff.buff, filteredAudioBuff.buff, nowTimeMs, sequenceNum);
                unlockAudioFilter();
            }
            // BUGFIX: the original re-copied orignAudioBuffs[targetIndex] here
            // when the filter lock was missed, but that slot was already
            // released above and could be concurrently overwritten by the
            // recorder thread; orignAudioBuff already holds the frame.
            //orignAudioBuff is ready
            int eibIndex = dstAudioEncoder.dequeueInputBuffer(-1);
            if (eibIndex >= 0) {
                ByteBuffer dstAudioEncoderIBuffer = dstAudioEncoder.getInputBuffers()[eibIndex];
                dstAudioEncoderIBuffer.position(0);
                dstAudioEncoderIBuffer.put(filtered?filteredAudioBuff.buff:orignAudioBuff.buff, 0, orignAudioBuff.buff.length);
                dstAudioEncoder.queueInputBuffer(eibIndex, 0, orignAudioBuff.buff.length, nowTimeMs * 1000, 0);
            } else {
                LogTools.d("dstAudioEncoder.dequeueInputBuffer(-1)<0");
            }
            LogTools.d("AudioFilterHandler,ProcessTime:" + (System.currentTimeMillis() - nowTimeMs));
        }

        /**
         * @return true if filter locked & filter!=null
         */
        private boolean lockAudioFilter() {
            try {
                boolean locked = lockAudioFilter.tryLock(FILTER_LOCK_TOLERATION, TimeUnit.MILLISECONDS);
                if (locked) {
                    if (audioFilter != null) {
                        return true;
                    } else {
                        lockAudioFilter.unlock();
                        return false;
                    }
                } else {
                    return false;
                }
            } catch (InterruptedException e) {
                // Restore interrupt status instead of silently swallowing it.
                Thread.currentThread().interrupt();
            }
            return false;
        }

        private void unlockAudioFilter() {
            lockAudioFilter.unlock();
        }
    }
}

View File

@@ -0,0 +1,555 @@
package me.lake.librestreaming.core;
import android.annotation.TargetApi;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import android.os.SystemClock;
import java.nio.ByteBuffer;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import me.lake.librestreaming.client.CallbackDelivery;
import me.lake.librestreaming.core.listener.RESScreenShotListener;
import me.lake.librestreaming.core.listener.RESVideoChangeListener;
import me.lake.librestreaming.encoder.MediaVideoEncoder;
import me.lake.librestreaming.filter.softvideofilter.BaseSoftVideoFilter;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.model.RESVideoBuff;
import me.lake.librestreaming.render.GLESRender;
import me.lake.librestreaming.render.IRender;
import me.lake.librestreaming.render.NativeRender;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.tools.BuffSizeCalculator;
import me.lake.librestreaming.tools.LogTools;
/**
 * Software video core: NV21 frames from the camera are queued into a buffer
 * ring, optionally run through a {@link BaseSoftVideoFilter}, rendered to the
 * preview, and converted to the encoder's YUV420 layout for the AVC
 * {@link MediaCodec}. Lifecycle methods are serialized on {@code syncOp}.
 */
public class RESSoftVideoCore implements RESVideoCore {
    RESCoreParameters resCoreParameters;
    private final Object syncOp = new Object();
    private SurfaceTexture cameraTexture;
    private int currentCamera;
    private MediaCodec dstVideoEncoder;
    private boolean isEncoderStarted;
    private final Object syncDstVideoEncoder = new Object();
    private MediaFormat dstVideoFormat;
    //render
    private final Object syncPreview = new Object();
    private IRender previewRender;
    //filter
    private Lock lockVideoFilter = null;
    private BaseSoftVideoFilter videoFilter;
    private VideoFilterHandler videoFilterHandler;
    private HandlerThread videoFilterHandlerThread;
    //sender
    private VideoSenderThread videoSenderThread;
    //VideoBuffs
    //buffers to handle buff from queueVideo
    private RESVideoBuff[] orignVideoBuffs;
    private int lastVideoQueueBuffIndex;
    //buffer to convert orignVideoBuff to NV21 if filter are set
    private RESVideoBuff orignNV21VideoBuff;
    //buffer to handle filtered color from filter if filter are set
    private RESVideoBuff filteredNV21VideoBuff;
    //buffer to convert other color format to suitable color format for dstVideoEncoder if nessesary
    private RESVideoBuff suitable4VideoEncoderBuff;
    final private Object syncResScreenShotListener = new Object();
    private RESScreenShotListener resScreenShotListener;
    private final Object syncIsLooping = new Object();
    private boolean isPreviewing = false;
    private boolean isStreaming = false;
    private int loopingInterval;

    public RESSoftVideoCore(RESCoreParameters parameters) {
        resCoreParameters = parameters;
        lockVideoFilter = new ReentrantLock(false);
        videoFilter = null;
    }

    /**
     * Switches the active camera, flushing any queued-but-unprocessed frames.
     * BUGFIX: the index is now written inside the syncOp block instead of
     * unsynchronized after it, so queueVideo()/acceptVideo() never observe a
     * half-switched state.
     */
    public void setCurrentCamera(int camIndex) {
        synchronized (syncOp) {
            if (currentCamera != camIndex) {
                if (videoFilterHandler != null) {
                    videoFilterHandler.removeMessages(VideoFilterHandler.WHAT_INCOMING_BUFF);
                }
                if (orignVideoBuffs != null) {
                    // Release all ring slots so stale frames are dropped.
                    for (RESVideoBuff buff : orignVideoBuffs) {
                        buff.isReadyToFill = true;
                    }
                    lastVideoQueueBuffIndex = 0;
                }
            }
            currentCamera = camIndex;
        }
    }

    /**
     * Creates the AVC encoder and all intermediate NV21/YUV420 buffers, and
     * starts the filter handler thread.
     *
     * @return false if the video MediaCodec could not be created
     */
    @Override
    public boolean prepare(RESConfig resConfig) {
        synchronized (syncOp) {
            resCoreParameters.renderingMode = resConfig.getRenderingMode();
            resCoreParameters.mediacdoecAVCBitRate = resConfig.getBitRate();
            resCoreParameters.videoBufferQueueNum = resConfig.getVideoBufferQueueNum();
            resCoreParameters.mediacodecAVCIFrameInterval = resConfig.getVideoGOP();
            resCoreParameters.mediacodecAVCFrameRate = resCoreParameters.videoFPS;
            loopingInterval = 1000 / resCoreParameters.videoFPS;
            dstVideoFormat = new MediaFormat();
            synchronized (syncDstVideoEncoder) {
                dstVideoEncoder = MediaCodecHelper.createSoftVideoMediaCodec(resCoreParameters, dstVideoFormat);
                isEncoderStarted = false;
                if (dstVideoEncoder == null) {
                    LogTools.e("create Video MediaCodec failed");
                    return false;
                }
            }
            resCoreParameters.previewBufferSize = BuffSizeCalculator.calculator(resCoreParameters.videoWidth,
                    resCoreParameters.videoHeight, resCoreParameters.previewColorFormat);
            //video
            int videoWidth = resCoreParameters.videoWidth;
            int videoHeight = resCoreParameters.videoHeight;
            int videoQueueNum = resCoreParameters.videoBufferQueueNum;
            orignVideoBuffs = new RESVideoBuff[videoQueueNum];
            for (int i = 0; i < videoQueueNum; i++) {
                orignVideoBuffs[i] = new RESVideoBuff(resCoreParameters.previewColorFormat, resCoreParameters.previewBufferSize);
            }
            lastVideoQueueBuffIndex = 0;
            orignNV21VideoBuff = new RESVideoBuff(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
                    BuffSizeCalculator.calculator(videoWidth, videoHeight, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar));
            filteredNV21VideoBuff = new RESVideoBuff(MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
                    BuffSizeCalculator.calculator(videoWidth, videoHeight, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar));
            suitable4VideoEncoderBuff = new RESVideoBuff(resCoreParameters.mediacodecAVCColorFormat,
                    BuffSizeCalculator.calculator(videoWidth, videoHeight, resCoreParameters.mediacodecAVCColorFormat));
            videoFilterHandlerThread = new HandlerThread("videoFilterHandlerThread");
            videoFilterHandlerThread.start();
            videoFilterHandler = new VideoFilterHandler(videoFilterHandlerThread.getLooper());
            return true;
        }
    }

    /** Starts the encoder + sender thread and kicks off the draw loop if idle. */
    @Override
    public boolean startStreaming(RESFlvDataCollecter flvDataCollecter) {
        synchronized (syncOp) {
            try {
                synchronized (syncDstVideoEncoder) {
                    if (dstVideoEncoder == null) {
                        dstVideoEncoder = MediaCodec.createEncoderByType(dstVideoFormat.getString(MediaFormat.KEY_MIME));
                    }
                    dstVideoEncoder.configure(dstVideoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                    dstVideoEncoder.start();
                    isEncoderStarted = true;
                }
                videoSenderThread = new VideoSenderThread("VideoSenderThread", dstVideoEncoder, flvDataCollecter);
                videoSenderThread.start();
                synchronized (syncIsLooping) {
                    // Only start the draw loop if neither preview nor stream was running.
                    if (!isPreviewing && !isStreaming) {
                        videoFilterHandler.removeMessages(VideoFilterHandler.WHAT_DRAW);
                        videoFilterHandler.sendMessageDelayed(videoFilterHandler.obtainMessage(VideoFilterHandler.WHAT_DRAW, SystemClock.uptimeMillis() + loopingInterval), loopingInterval);
                    }
                    isStreaming = true;
                }
            } catch (Exception e) {
                LogTools.trace("RESVideoClient.start()failed", e);
                return false;
            }
            return true;
        }
    }

    /** No-op for the soft core: frames arrive via {@link #queueVideo(byte[])}. */
    @Override
    public void updateCamTexture(SurfaceTexture camTex) {
    }

    /** Stops the sender thread and shuts down the encoder. */
    @Override
    public boolean stopStreaming() {
        synchronized (syncOp) {
            videoSenderThread.quit();
            synchronized (syncIsLooping) {
                isStreaming = false;
            }
            try {
                videoSenderThread.join();
            } catch (InterruptedException e) {
                // Preserve the interrupt status for callers higher up the stack.
                Thread.currentThread().interrupt();
                LogTools.trace("RESCore", e);
            }
            synchronized (syncDstVideoEncoder) {
                dstVideoEncoder.stop();
                dstVideoEncoder.release();
                dstVideoEncoder = null;
                isEncoderStarted = false;
            }
            videoSenderThread = null;
            return true;
        }
    }

    /** Destroys the current video filter, if any. */
    @Override
    public boolean destroy() {
        synchronized (syncOp) {
            lockVideoFilter.lock();
            if (videoFilter != null) {
                videoFilter.onDestroy();
            }
            lockVideoFilter.unlock();
            return true;
        }
    }

    /** Changes the target bitrate; applied to the running encoder via the handler. */
    @TargetApi(Build.VERSION_CODES.KITKAT)
    @Override
    public void reSetVideoBitrate(int bitrate) {
        synchronized (syncOp) {
            if (videoFilterHandler != null) {
                videoFilterHandler.sendMessage(videoFilterHandler.obtainMessage(VideoFilterHandler.WHAT_RESET_BITRATE, bitrate, 0));
                resCoreParameters.mediacdoecAVCBitRate = bitrate;
                dstVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, resCoreParameters.mediacdoecAVCBitRate);
            }
        }
    }

    /** @return current target video bitrate in bps. */
    @TargetApi(Build.VERSION_CODES.KITKAT)
    @Override
    public int getVideoBitrate() {
        synchronized (syncOp) {
            return resCoreParameters.mediacdoecAVCBitRate;
        }
    }

    /** Changes the draw-loop frame rate. Assumes fps > 0 (division below). */
    @Override
    public void reSetVideoFPS(int fps) {
        synchronized (syncOp) {
            resCoreParameters.videoFPS = fps;
            loopingInterval = 1000 / resCoreParameters.videoFPS;
        }
    }

    /** Not supported by the soft core; intentionally a no-op. */
    @Override
    public void reSetVideoSize(RESCoreParameters newParameters) {
    }

    /** Creates the preview renderer and kicks off the draw loop if idle. */
    @Override
    public void startPreview(SurfaceTexture surfaceTexture, int visualWidth, int visualHeight) {
        synchronized (syncPreview) {
            if (previewRender != null) {
                throw new RuntimeException("startPreview without destroy previous");
            }
            switch (resCoreParameters.renderingMode) {
                case RESCoreParameters.RENDERING_MODE_NATIVE_WINDOW:
                    previewRender = new NativeRender();
                    break;
                case RESCoreParameters.RENDERING_MODE_OPENGLES:
                    previewRender = new GLESRender();
                    break;
                default:
                    throw new RuntimeException("Unknow rendering mode");
            }
            previewRender.create(surfaceTexture,
                    resCoreParameters.previewColorFormat,
                    resCoreParameters.videoWidth,
                    resCoreParameters.videoHeight,
                    visualWidth,
                    visualHeight);
            synchronized (syncIsLooping) {
                if (!isPreviewing && !isStreaming) {
                    videoFilterHandler.removeMessages(VideoFilterHandler.WHAT_DRAW);
                    videoFilterHandler.sendMessageDelayed(videoFilterHandler.obtainMessage(VideoFilterHandler.WHAT_DRAW, SystemClock.uptimeMillis() + loopingInterval), loopingInterval);
                }
                isPreviewing = true;
            }
        }
    }

    /** Notifies the renderer of a new preview surface size. */
    @Override
    public void updatePreview(int visualWidth, int visualHeight) {
        synchronized (syncPreview) {
            if (previewRender == null) {
                throw new RuntimeException("updatePreview without startPreview");
            }
            previewRender.update(visualWidth, visualHeight);
        }
    }

    /** Destroys the preview renderer; the draw loop stops if not streaming. */
    @Override
    public void stopPreview(boolean releaseTexture) {
        synchronized (syncPreview) {
            if (previewRender == null) {
                throw new RuntimeException("stopPreview without startPreview");
            }
            previewRender.destroy(releaseTexture);
            previewRender = null;
            synchronized (syncIsLooping) {
                isPreviewing = false;
            }
        }
    }

    /**
     * Queues one raw camera frame into the ring; drops the frame if the next
     * slot is still being processed, rather than blocking the camera thread.
     */
    public void queueVideo(byte[] rawVideoFrame) {
        synchronized (syncOp) {
            int targetIndex = (lastVideoQueueBuffIndex + 1) % orignVideoBuffs.length;
            if (orignVideoBuffs[targetIndex].isReadyToFill) {
                LogTools.d("queueVideo,accept ,targetIndex" + targetIndex);
                acceptVideo(rawVideoFrame, orignVideoBuffs[targetIndex].buff);
                orignVideoBuffs[targetIndex].isReadyToFill = false;
                lastVideoQueueBuffIndex = targetIndex;
                videoFilterHandler.sendMessage(videoFilterHandler.obtainMessage(VideoFilterHandler.WHAT_INCOMING_BUFF, targetIndex, 0));
            } else {
                LogTools.d("queueVideo,abandon,targetIndex" + targetIndex);
            }
        }
    }

    /** Rotates/flips the NV21 frame according to the active camera's direction mode. */
    private void acceptVideo(byte[] src, byte[] dst) {
        int directionFlag = currentCamera == Camera.CameraInfo.CAMERA_FACING_BACK ? resCoreParameters.backCameraDirectionMode : resCoreParameters.frontCameraDirectionMode;
        ColorHelper.NV21Transform(src,
                dst,
                resCoreParameters.previewVideoWidth,
                resCoreParameters.previewVideoHeight,
                directionFlag);
    }

    /**
     * Locks and returns the current video filter (may be null). Callers MUST
     * pair this with {@link #releaseVideoFilter()}.
     */
    public BaseSoftVideoFilter acquireVideoFilter() {
        lockVideoFilter.lock();
        return videoFilter;
    }

    /** Releases the lock taken by {@link #acquireVideoFilter()}. */
    public void releaseVideoFilter() {
        lockVideoFilter.unlock();
    }

    /** Replaces the video filter, destroying the old one and initializing the new. */
    public void setVideoFilter(BaseSoftVideoFilter baseSoftVideoFilter) {
        lockVideoFilter.lock();
        if (videoFilter != null) {
            videoFilter.onDestroy();
        }
        videoFilter = baseSoftVideoFilter;
        if (videoFilter != null) {
            videoFilter.onInit(resCoreParameters.videoWidth, resCoreParameters.videoHeight);
        }
        lockVideoFilter.unlock();
    }

    /** Registers a one-shot listener; fulfilled on the next drawn frame. */
    @Override
    public void takeScreenShot(RESScreenShotListener listener) {
        synchronized (syncResScreenShotListener) {
            resScreenShotListener = listener;
        }
    }

    /** Not supported by the soft core; intentionally a no-op. */
    @Override
    public void setVideoChangeListener(RESVideoChangeListener listener) {
    }

    /** @return measured frames-per-second of the draw loop (0 before prepare). */
    @Override
    public float getDrawFrameRate() {
        synchronized (syncOp) {
            return videoFilterHandler == null ? 0 : videoFilterHandler.getDrawFrameRate();
        }
    }

    /**
     * Worker handler: copies incoming frames out of the ring, runs the filter,
     * renders the preview, fulfills screenshots, converts to the encoder's
     * color format and feeds the AVC encoder at a fixed interval.
     */
    private class VideoFilterHandler extends Handler {
        public static final int FILTER_LOCK_TOLERATION = 3;//3ms
        public static final int WHAT_INCOMING_BUFF = 1;
        public static final int WHAT_DRAW = 2;
        public static final int WHAT_RESET_BITRATE = 3;
        private int sequenceNum;
        private RESFrameRateMeter drawFrameRateMeter;

        VideoFilterHandler(Looper looper) {
            super(looper);
            sequenceNum = 0;
            drawFrameRateMeter = new RESFrameRateMeter();
        }

        public float getDrawFrameRate() {
            return drawFrameRateMeter.getFps();
        }

        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
                case WHAT_INCOMING_BUFF: {
                    int targetIndex = msg.arg1;
                    /**
                     * orignVideoBuffs[targetIndex] is ready
                     * orignVideoBuffs[targetIndex]->orignNV21VideoBuff
                     */
                    System.arraycopy(orignVideoBuffs[targetIndex].buff, 0,
                            orignNV21VideoBuff.buff, 0, orignNV21VideoBuff.buff.length);
                    orignVideoBuffs[targetIndex].isReadyToFill = true;
                }
                break;
                case WHAT_DRAW: {
                    // Self-rescheduling draw loop: target the next tick, or fire
                    // immediately if we are already late.
                    long time = (Long) msg.obj;
                    long interval = time + loopingInterval - SystemClock.uptimeMillis();
                    synchronized (syncIsLooping) {
                        if (isPreviewing || isStreaming) {
                            if (interval > 0) {
                                videoFilterHandler.sendMessageDelayed(videoFilterHandler.obtainMessage(
                                                VideoFilterHandler.WHAT_DRAW,
                                                SystemClock.uptimeMillis() + interval),
                                        interval);
                            } else {
                                videoFilterHandler.sendMessage(videoFilterHandler.obtainMessage(
                                        VideoFilterHandler.WHAT_DRAW,
                                        SystemClock.uptimeMillis() + loopingInterval));
                            }
                        }
                    }
                    sequenceNum++;
                    long nowTimeMs = SystemClock.uptimeMillis();
                    boolean isFilterLocked = lockVideoFilter();
                    if (isFilterLocked) {
                        boolean modified;
                        modified = videoFilter.onFrame(orignNV21VideoBuff.buff, filteredNV21VideoBuff.buff, nowTimeMs, sequenceNum);
                        unlockVideoFilter();
                        rendering(modified ? filteredNV21VideoBuff.buff : orignNV21VideoBuff.buff);
                        checkScreenShot(modified ? filteredNV21VideoBuff.buff : orignNV21VideoBuff.buff);
                        /**
                         * orignNV21VideoBuff is ready
                         * orignNV21VideoBuff->suitable4VideoEncoderBuff
                         */
                        if (resCoreParameters.mediacodecAVCColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
                            ColorHelper.NV21TOYUV420SP(modified ? filteredNV21VideoBuff.buff : orignNV21VideoBuff.buff,
                                    suitable4VideoEncoderBuff.buff, resCoreParameters.videoWidth * resCoreParameters.videoHeight);
                        } else if (resCoreParameters.mediacodecAVCColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
                            ColorHelper.NV21TOYUV420P(modified ? filteredNV21VideoBuff.buff : orignNV21VideoBuff.buff,
                                    suitable4VideoEncoderBuff.buff, resCoreParameters.videoWidth * resCoreParameters.videoHeight);
                        } else {//LAKETODO colorConvert
                        }
                    } else {
                        rendering(orignNV21VideoBuff.buff);
                        checkScreenShot(orignNV21VideoBuff.buff);
                        if (resCoreParameters.mediacodecAVCColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
                            ColorHelper.NV21TOYUV420SP(orignNV21VideoBuff.buff,
                                    suitable4VideoEncoderBuff.buff,
                                    resCoreParameters.videoWidth * resCoreParameters.videoHeight);
                        } else if (resCoreParameters.mediacodecAVCColorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) {
                            ColorHelper.NV21TOYUV420P(orignNV21VideoBuff.buff,
                                    suitable4VideoEncoderBuff.buff,
                                    resCoreParameters.videoWidth * resCoreParameters.videoHeight);
                        }
                        orignNV21VideoBuff.isReadyToFill = true;
                    }
                    drawFrameRateMeter.count();
                    //suitable4VideoEncoderBuff is ready
                    synchronized (syncDstVideoEncoder) {
                        if (dstVideoEncoder != null && isEncoderStarted) {
                            int eibIndex = dstVideoEncoder.dequeueInputBuffer(-1);
                            if (eibIndex >= 0) {
                                ByteBuffer dstVideoEncoderIBuffer = dstVideoEncoder.getInputBuffers()[eibIndex];
                                dstVideoEncoderIBuffer.position(0);
                                dstVideoEncoderIBuffer.put(suitable4VideoEncoderBuff.buff, 0, suitable4VideoEncoderBuff.buff.length);
                                dstVideoEncoder.queueInputBuffer(eibIndex, 0, suitable4VideoEncoderBuff.buff.length, nowTimeMs * 1000, 0);
                            } else {
                                LogTools.d("dstVideoEncoder.dequeueInputBuffer(-1)<0");
                            }
                        }
                    }
                    LogTools.d("VideoFilterHandler,ProcessTime:" + (System.currentTimeMillis() - nowTimeMs));
                }
                break;
                case WHAT_RESET_BITRATE: {
                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT && dstVideoEncoder != null) {
                        Bundle bitrateBundle = new Bundle();
                        bitrateBundle.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, msg.arg1);
                        dstVideoEncoder.setParameters(bitrateBundle);
                    }
                }
                break;
            }
        }

        /**
         * rendering nv21 using native window
         *
         * @param pixel
         */
        private void rendering(byte[] pixel) {
            synchronized (syncPreview) {
                if (previewRender == null) {
                    return;
                }
                previewRender.rendering(pixel);
            }
        }

        /**
         * check if screenshotlistener exist
         *
         * @param pixel
         */
        private void checkScreenShot(byte[] pixel) {
            synchronized (syncResScreenShotListener) {
                if (resScreenShotListener != null) {
                    int[] argbPixel = new int[resCoreParameters.videoWidth * resCoreParameters.videoHeight];
                    ColorHelper.NV21TOARGB(pixel,
                            argbPixel,
                            resCoreParameters.videoWidth,
                            resCoreParameters.videoHeight);
                    Bitmap result = Bitmap.createBitmap(argbPixel,
                            resCoreParameters.videoWidth,
                            resCoreParameters.videoHeight,
                            Bitmap.Config.ARGB_8888);
                    CallbackDelivery.i().post(new RESScreenShotListener.RESScreenShotListenerRunable(resScreenShotListener, result));
                    resScreenShotListener = null;
                }
            }
        }

        /**
         * @return true if filter locked & filter!=null
         */
        private boolean lockVideoFilter() {
            try {
                boolean locked = lockVideoFilter.tryLock(FILTER_LOCK_TOLERATION, TimeUnit.MILLISECONDS);
                if (locked) {
                    if (videoFilter != null) {
                        return true;
                    } else {
                        lockVideoFilter.unlock();
                        return false;
                    }
                } else {
                    return false;
                }
            } catch (InterruptedException e) {
                // Restore interrupt status instead of silently swallowing it.
                Thread.currentThread().interrupt();
            }
            return false;
        }

        private void unlockVideoFilter() {
            lockVideoFilter.unlock();
        }
    }

    /** MP4 recording is not supported by the soft core; intentionally a no-op. */
    public void setVideoEncoder(final MediaVideoEncoder encoder) {
    }

    /** Mirroring is not supported by the soft core; intentionally a no-op. */
    @Override
    public void setMirror(boolean isEnableMirror, boolean isEnablePreviewMirror, boolean isEnableStreamMirror) {
    }

    /** EGL context reset is not applicable to the soft core; intentionally a no-op. */
    public void setNeedResetEglContext(boolean bol){
    }
}

View File

@@ -0,0 +1,53 @@
package me.lake.librestreaming.core;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import me.lake.librestreaming.core.listener.RESScreenShotListener;
import me.lake.librestreaming.core.listener.RESVideoChangeListener;
import me.lake.librestreaming.encoder.MediaVideoEncoder;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
/**
 * Contract implemented by the video cores (software and hardware codec
 * pipelines): camera frame intake, preview rendering, streaming lifecycle and
 * runtime parameter changes.
 */
public interface RESVideoCore {
    /** Texture id reserved for the "overwatch" debug texture. */
    int OVERWATCH_TEXTURE_ID = 10;
    /** Allocates encoder/buffers from the config; returns false on failure. */
    boolean prepare(RESConfig resConfig);
    /** Supplies the camera's SurfaceTexture to the render pipeline. */
    void updateCamTexture(SurfaceTexture camTex);
    /** Begins rendering the camera into the given surface at the given visual size. */
    void startPreview(SurfaceTexture surfaceTexture, int visualWidth, int visualHeight);
    /** Notifies the renderer that the preview surface size changed. */
    void updatePreview(int visualWidth, int visualHeight);
    /** Stops preview rendering; optionally releases the surface texture. */
    void stopPreview(boolean releaseTexture);
    /** Starts encoding and handing FLV packets to the collecter. */
    boolean startStreaming(RESFlvDataCollecter flvDataCollecter);
    /** Stops encoding/sending; returns true on success. */
    boolean stopStreaming();
    /** Releases all resources; the instance is unusable afterwards. */
    boolean destroy();
    /** Changes the target video bitrate (bps) while running. */
    void reSetVideoBitrate(int bitrate);
    /** @return current target video bitrate in bps. */
    int getVideoBitrate();
    /** Changes the target frame rate. */
    void reSetVideoFPS(int fps);
    /** Applies a new video size taken from the given parameters. */
    void reSetVideoSize(RESCoreParameters newParameters);
    /** Informs the core which camera (front/back) is active. */
    void setCurrentCamera(int cameraIndex);
    /** Captures the next rendered frame and delivers it to the listener. */
    void takeScreenShot(RESScreenShotListener listener);
    /** Registers a listener for video size/parameter changes. */
    void setVideoChangeListener(RESVideoChangeListener listener);
    /** @return measured draw-loop frames-per-second. */
    float getDrawFrameRate();
    /** Attaches the MP4 recorder's video encoder (null to detach). */
    void setVideoEncoder(final MediaVideoEncoder encoder);
    /** Configures horizontal mirroring globally, for preview and for stream. */
    void setMirror(boolean isEnableMirror,boolean isEnablePreviewMirror,boolean isEnableStreamMirror);
    /** Forces the encoder EGL context to be re-bound on next use. */
    void setNeedResetEglContext(boolean bol);
}

View File

@@ -0,0 +1,138 @@
package me.lake.librestreaming.core;
import android.media.MediaCodec;
import android.media.MediaFormat;
import java.nio.ByteBuffer;
import me.lake.librestreaming.rtmp.RESFlvData;
import me.lake.librestreaming.rtmp.RESFlvDataCollecter;
import me.lake.librestreaming.rtmp.RESRtmpSender;
import me.lake.librestreaming.tools.LogTools;
/**
 * Drains H.264 output from a {@link MediaCodec} video encoder, wraps it into
 * FLV video tags and hands the tags to a {@link RESFlvDataCollecter}.
 * <p>
 * SPS/PPS (AVCDecoderConfigurationRecord) is sent once when the encoder
 * reports INFO_OUTPUT_FORMAT_CHANGED; codec-config buffers are skipped.
 */
public class VideoSenderThread extends Thread {
    // Timeout for dequeueOutputBuffer, in microseconds.
    private static final long WAIT_TIME = 5000;
    private MediaCodec.BufferInfo eInfo;
    // Presentation time (ms) of the first frame; stream DTS values are relative to it.
    private long startTime = 0;
    private MediaCodec dstVideoEncoder;
    // Guards dstVideoEncoder, which may be swapped via updateMediaCodec().
    private final Object syncDstVideoEncoder = new Object();
    private RESFlvDataCollecter dataCollecter;

    VideoSenderThread(String name, MediaCodec encoder, RESFlvDataCollecter flvDataCollecter) {
        super(name);
        eInfo = new MediaCodec.BufferInfo();
        startTime = 0;
        dstVideoEncoder = encoder;
        dataCollecter = flvDataCollecter;
    }

    /** Swap in a new encoder instance (e.g. after a resolution change). */
    public void updateMediaCodec(MediaCodec encoder) {
        synchronized (syncDstVideoEncoder) {
            dstVideoEncoder = encoder;
        }
    }

    private boolean shouldQuit = false;

    /** Ask the thread to exit; interrupts a possible sleep. */
    void quit() {
        shouldQuit = true;
        this.interrupt();
    }

    @Override
    public void run() {
        while (!shouldQuit) {
            synchronized (syncDstVideoEncoder) {
                int eobIndex = MediaCodec.INFO_TRY_AGAIN_LATER;
                try {
                    eobIndex = dstVideoEncoder.dequeueOutputBuffer(eInfo, WAIT_TIME);
                } catch (Exception ignored) {
                    // Encoder may be mid-swap/released; treat as "try again later".
                }
                switch (eobIndex) {
                    case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                        LogTools.d("VideoSenderThread,MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED");
                        break;
                    case MediaCodec.INFO_TRY_AGAIN_LATER:
                        // LogTools.d("VideoSenderThread,MediaCodec.INFO_TRY_AGAIN_LATER");
                        break;
                    case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                        LogTools.d("VideoSenderThread,MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:" +
                                dstVideoEncoder.getOutputFormat().toString());
                        sendAVCDecoderConfigurationRecord(0, dstVideoEncoder.getOutputFormat());
                        break;
                    default:
                        LogTools.d("VideoSenderThread,MediaCode,eobIndex=" + eobIndex);
                        if (startTime == 0) {
                            startTime = eInfo.presentationTimeUs / 1000;
                        }
                        /**
                         * we send sps pps already in INFO_OUTPUT_FORMAT_CHANGED
                         * so we ignore MediaCodec.BUFFER_FLAG_CODEC_CONFIG
                         */
                        // BUGFIX: flags is a bit-field — the original equality test
                        // (flags != BUFFER_FLAG_CODEC_CONFIG) let a config buffer
                        // through whenever any additional flag bit was also set.
                        if ((eInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0 && eInfo.size != 0) {
                            ByteBuffer realData = dstVideoEncoder.getOutputBuffers()[eobIndex];
                            // Skip the 4-byte Annex-B start code; keep only the NALU payload.
                            realData.position(eInfo.offset + 4);
                            realData.limit(eInfo.offset + eInfo.size);
                            sendRealData((eInfo.presentationTimeUs / 1000) - startTime, realData);
                        }
                        dstVideoEncoder.releaseOutputBuffer(eobIndex, false);
                        break;
                }
            }
            try {
                sleep(5);
            } catch (InterruptedException ignored) {
                // quit() interrupts the sleep; the while condition handles exit.
            }
        }
        eInfo = null;
    }

    /**
     * Build and emit the FLV "AVC sequence header" tag carrying SPS/PPS.
     *
     * @param tms    tag timestamp in ms (0 for the header)
     * @param format encoder output format containing csd-0/csd-1
     */
    private void sendAVCDecoderConfigurationRecord(long tms, MediaFormat format) {
        byte[] AVCDecoderConfigurationRecord = Packager.H264Packager.generateAVCDecoderConfigurationRecord(format);
        int packetLen = Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH +
                AVCDecoderConfigurationRecord.length;
        byte[] finalBuff = new byte[packetLen];
        Packager.FLVPackager.fillFlvVideoTag(finalBuff,
                0,
                true,
                true,
                AVCDecoderConfigurationRecord.length);
        System.arraycopy(AVCDecoderConfigurationRecord, 0,
                finalBuff, Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH, AVCDecoderConfigurationRecord.length);
        RESFlvData resFlvData = new RESFlvData();
        resFlvData.droppable = false; // the sequence header must never be dropped
        resFlvData.byteBuffer = finalBuff;
        resFlvData.size = finalBuff.length;
        resFlvData.dts = (int) tms;
        resFlvData.flvTagType = RESFlvData.FLV_RTMP_PACKET_TYPE_VIDEO;
        resFlvData.videoFrameType = RESFlvData.NALU_TYPE_IDR;
        dataCollecter.collect(resFlvData, RESRtmpSender.FROM_VIDEO);
    }

    /**
     * Wrap one encoded NALU into an FLV video tag and emit it.
     *
     * @param tms      tag timestamp in ms, relative to the first frame
     * @param realData NALU payload (start code already skipped by the caller)
     */
    private void sendRealData(long tms, ByteBuffer realData) {
        int realDataLength = realData.remaining();
        int packetLen = Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH +
                Packager.FLVPackager.NALU_HEADER_LENGTH +
                realDataLength;
        byte[] finalBuff = new byte[packetLen];
        realData.get(finalBuff, Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH +
                        Packager.FLVPackager.NALU_HEADER_LENGTH,
                realDataLength);
        // NALU type from the first payload byte (low 5 bits); 5 == IDR (key frame).
        int frameType = finalBuff[Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH +
                Packager.FLVPackager.NALU_HEADER_LENGTH] & 0x1F;
        Packager.FLVPackager.fillFlvVideoTag(finalBuff,
                0,
                false,
                frameType == 5,
                realDataLength);
        RESFlvData resFlvData = new RESFlvData();
        resFlvData.droppable = true; // ordinary frames may be dropped under congestion
        resFlvData.byteBuffer = finalBuff;
        resFlvData.size = finalBuff.length;
        resFlvData.dts = (int) tms;
        resFlvData.flvTagType = RESFlvData.FLV_RTMP_PACKET_TYPE_VIDEO;
        resFlvData.videoFrameType = frameType;
        dataCollecter.collect(resFlvData, RESRtmpSender.FROM_VIDEO);
    }
}

View File

@@ -0,0 +1,27 @@
package me.lake.librestreaming.core.listener;
/**
 * Callbacks for RTMP connection lifecycle events.
 * Result/error codes are integers produced by the native rtmp layer.
 */
public interface RESConnectionListener {
    /** Called with the result of an open-connection attempt. */
    void onOpenConnectionResult(int result);

    /** Called when writing to the connection fails. */
    void onWriteError(int errno);

    /** Called with the result of closing the connection. */
    void onCloseConnectionResult(int result);

    /** Runnable wrapper used to deliver onWriteError on a callback thread. */
    class RESWriteErrorRunable implements Runnable {
        RESConnectionListener connectionListener;
        int errno;

        public RESWriteErrorRunable(RESConnectionListener connectionListener, int errno) {
            this.connectionListener = connectionListener;
            this.errno = errno;
        }

        @Override
        public void run() {
            // Listener may have been cleared before delivery; silently drop then.
            if (connectionListener != null) {
                connectionListener.onWriteError(errno);
            }
        }
    }
}

View File

@@ -0,0 +1,25 @@
package me.lake.librestreaming.core.listener;
import android.graphics.Bitmap;
/**
 * Callback for asynchronous screenshot capture.
 */
public interface RESScreenShotListener {
    /** Delivers the captured frame; bitmap may be null on failure (verify with caller). */
    void onScreenShotResult(Bitmap bitmap);

    /** Runnable wrapper used to deliver the result on a callback thread. */
    class RESScreenShotListenerRunable implements Runnable {
        Bitmap resultBitmap;
        RESScreenShotListener resScreenShotListener;

        public RESScreenShotListenerRunable(RESScreenShotListener listener, Bitmap bitmap) {
            resScreenShotListener = listener;
            resultBitmap = bitmap;
        }

        @Override
        public void run() {
            // Listener may have been cleared before delivery; silently drop then.
            if (resScreenShotListener != null) {
                resScreenShotListener.onScreenShotResult(resultBitmap);
            }
        }
    }
}

View File

@@ -0,0 +1,24 @@
package me.lake.librestreaming.core.listener;
/**
 * Callback for video size changes (e.g. after reSetVideoSize).
 */
public interface RESVideoChangeListener {
    /** Delivers the new video dimensions in pixels. */
    void onVideoSizeChanged(int width, int height);

    /** Runnable wrapper used to deliver the callback on a callback thread. */
    class RESVideoChangeRunable implements Runnable {
        RESVideoChangeListener videoChangeListener;
        int w, h;

        public RESVideoChangeRunable(RESVideoChangeListener videoChangeListener, int w, int h) {
            this.videoChangeListener = videoChangeListener;
            this.w = w;
            this.h = h;
        }

        @Override
        public void run() {
            // Listener may have been cleared before delivery; silently drop then.
            if (videoChangeListener != null) {
                videoChangeListener.onVideoSizeChanged(w, h);
            }
        }
    }
}

View File

@@ -0,0 +1,186 @@
package me.lake.librestreaming.encoder;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.util.Log;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
 * Records 16-bit PCM from the device microphone on a dedicated thread and
 * feeds it to an AAC (audio/mp4a-latm) MediaCodec encoder via the
 * {@link MediaEncoder} base class.
 */
public class MediaAudioEncoder extends MediaEncoder {
    private static final boolean DEBUG = false; // TODO set false on release
    private static final String TAG = "MediaAudioEncoder";

    private static final String MIME_TYPE = "audio/mp4a-latm";
    private static final int SAMPLE_RATE = 44100; // 44.1[KHz] is only setting guaranteed to be available on all devices.
    private static final int BIT_RATE = 64000;
    public static final int SAMPLES_PER_FRAME = 1024; // AAC, bytes/frame/channel
    public static final int FRAMES_PER_BUFFER = 25;   // AAC, frame/buffer/sec

    // Capture thread; created on startRecording, dropped on release.
    private AudioThread mAudioThread = null;

    public MediaAudioEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
        super(muxer, listener);
    }

    /**
     * Configure and start the AAC encoder.
     * Returns early (without throwing) if no suitable codec is found.
     */
    @Override
    protected void prepare() throws IOException {
        if (DEBUG) Log.v(TAG, "prepare:");
        mTrackIndex = -1;
        mMuxerStarted = mIsEOS = false;
        // prepare MediaCodec for AAC encoding of audio data from inernal mic.
        final MediaCodecInfo audioCodecInfo = selectAudioCodec(MIME_TYPE);
        if (audioCodecInfo == null) {
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return;
        }
        if (DEBUG) Log.i(TAG, "selected codec: " + audioCodecInfo.getName());
        // Stereo AAC-LC at 44.1kHz / 64kbps.
        final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 2);
        audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_STEREO);
        audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 2);
        // audioFormat.setLong(MediaFormat.KEY_MAX_INPUT_SIZE, inputFile.length());
        // audioFormat.setLong(MediaFormat.KEY_DURATION, (long)durationInMs );
        if (DEBUG) Log.i(TAG, "format: " + audioFormat);
        mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
        mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mMediaCodec.start();
        if (DEBUG) Log.i(TAG, "prepare finishing");
        if (mListener != null) {
            try {
                mListener.onPrepared(this);
            } catch (final Exception e) {
                Log.e(TAG, "prepare:", e);
            }
        }
    }

    @Override
    protected void startRecording() {
        super.startRecording();
        // create and execute audio capturing thread using internal mic
        if (mAudioThread == null) {
            mAudioThread = new AudioThread();
            mAudioThread.start();
        }
    }

    @Override
    protected void release() {
        // The thread observes mIsCapturing/mRequestStop and exits on its own.
        mAudioThread = null;
        super.release();
    }

    // Audio sources to try, in order of preference; first one that
    // initializes successfully wins.
    private static final int[] AUDIO_SOURCES = new int[] {
            MediaRecorder.AudioSource.MIC,
            MediaRecorder.AudioSource.DEFAULT,
            MediaRecorder.AudioSource.CAMCORDER,
            MediaRecorder.AudioSource.VOICE_COMMUNICATION,
            MediaRecorder.AudioSource.VOICE_RECOGNITION,
    };

    /**
     * Thread to capture audio data from internal mic as uncompressed 16bit PCM data
     * and write them to the MediaCodec encoder
     */
    private class AudioThread extends Thread {
        @Override
        public void run() {
            android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
            try {
                final int min_buffer_size = AudioRecord.getMinBufferSize(
                        SAMPLE_RATE, AudioFormat.CHANNEL_IN_STEREO,
                        AudioFormat.ENCODING_PCM_16BIT);
                int buffer_size = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;
                // AudioRecord requires at least min_buffer_size; round up in
                // SAMPLES_PER_FRAME units if our preferred size is too small.
                if (buffer_size < min_buffer_size)
                    buffer_size = ((min_buffer_size / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;
                AudioRecord audioRecord = null;
                // Try each candidate source until one initializes.
                for (final int source : AUDIO_SOURCES) {
                    try {
                        audioRecord = new AudioRecord(
                                source, SAMPLE_RATE,
                                AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT, buffer_size);
                        if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED)
                            audioRecord = null;
                    } catch (final Exception e) {
                        // Best-effort probing: failure just means "try next source".
                        audioRecord = null;
                    }
                    if (audioRecord != null) break;
                }
                if (audioRecord != null) {
                    try {
                        if (mIsCapturing) {
                            if (DEBUG) Log.v(TAG, "AudioThread:start audio recording");
                            final ByteBuffer buf = ByteBuffer.allocateDirect(SAMPLES_PER_FRAME);
                            int readBytes;
                            audioRecord.startRecording();
                            try {
                                for (; mIsCapturing && !mRequestStop && !mIsEOS ;) {
                                    // read audio data from internal mic
                                    buf.clear();
                                    readBytes = audioRecord.read(buf, SAMPLES_PER_FRAME);
                                    if (readBytes > 0) {
                                        // set audio data to encoder
                                        buf.position(readBytes);
                                        buf.flip();
                                        encode(buf, readBytes, getPTSUs());
                                        frameAvailableSoon();
                                    }
                                }
                                // One last drain request so pending data is flushed.
                                frameAvailableSoon();
                            } finally {
                                audioRecord.stop();
                            }
                        }
                    } finally {
                        audioRecord.release();
                    }
                } else {
                    Log.e(TAG, "failed to initialize AudioRecord");
                }
            } catch (final Exception e) {
                Log.e(TAG, "AudioThread#run", e);
            }
            if (DEBUG) Log.v(TAG, "AudioThread:finished");
        }
    }

    /**
     * select the first codec that match a specific MIME type
     * @param mimeType
     * @return the first matching encoder, or null if none is available
     */
    private static final MediaCodecInfo selectAudioCodec(final String mimeType) {
        if (DEBUG) Log.v(TAG, "selectAudioCodec:");
        MediaCodecInfo result = null;
        // get the list of available codecs
        final int numCodecs = MediaCodecList.getCodecCount();
        LOOP: for (int i = 0; i < numCodecs; i++) {
            final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            if (!codecInfo.isEncoder()) { // skipp decoder
                continue;
            }
            final String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (DEBUG) Log.i(TAG, "supportedType:" + codecInfo.getName() + ",MIME=" + types[j]);
                if (types[j].equalsIgnoreCase(mimeType)) {
                    if (result == null) {
                        result = codecInfo;
                        break LOOP; // first match wins
                    }
                }
            }
        }
        return result;
    }
}

View File

@@ -0,0 +1,379 @@
package me.lake.librestreaming.encoder;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.util.Log;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.nio.ByteBuffer;
/**
 * Base class driving a MediaCodec encoder on its own thread.
 * <p>
 * Subclasses implement {@link #prepare()}; producers call
 * {@link #frameAvailableSoon()} to request a drain. Encoded output is
 * written to the {@link MediaMuxerWrapper} held via a weak reference.
 * All cross-thread state (mIsCapturing, mRequestStop, mRequestDrain) is
 * guarded by {@code mSync}.
 */
public abstract class MediaEncoder implements Runnable {
    private static final boolean DEBUG = false; // TODO set false on release
    private static final String TAG = "MediaEncoder";

    protected static final int TIMEOUT_USEC = 10000; // 10[msec]
    protected static final int MSG_FRAME_AVAILABLE = 1;
    protected static final int MSG_STOP_RECORDING = 9;

    /** Lifecycle callbacks delivered from the encoder thread. */
    public interface MediaEncoderListener {
        public void onPrepared(MediaEncoder encoder);
        public void onStopped(MediaEncoder encoder);
    }

    // Monitor guarding all mutable encoder state and used for thread handshakes.
    protected final Object mSync = new Object();
    /**
     * Flag that indicate this encoder is capturing now.
     */
    protected volatile boolean mIsCapturing;
    /**
     * Flag that indicate the frame data will be available soon.
     */
    // Counts pending drain requests (incremented by frameAvailableSoon).
    private int mRequestDrain;
    /**
     * Flag to request stop capturing
     */
    protected volatile boolean mRequestStop;
    /**
     * Flag that indicate encoder received EOS(End Of Stream)
     */
    protected boolean mIsEOS;
    /**
     * Flag the indicate the muxer is running
     */
    protected boolean mMuxerStarted;
    /**
     * Track Number
     */
    protected int mTrackIndex;
    /**
     * MediaCodec instance for encoding
     */
    protected MediaCodec mMediaCodec; // API >= 16(Android4.1.2)
    /**
     * Weak refarence of MediaMuxerWarapper instance
     */
    protected final WeakReference<MediaMuxerWrapper> mWeakMuxer;
    /**
     * BufferInfo instance for dequeuing
     */
    private MediaCodec.BufferInfo mBufferInfo; // API >= 16(Android4.1.2)

    protected final MediaEncoderListener mListener;

    /**
     * Registers this encoder with the muxer and starts the encoder thread,
     * blocking until that thread has entered run() (mSync.wait/notify handshake).
     */
    public MediaEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
        if (listener == null) throw new NullPointerException("MediaEncoderListener is null");
        if (muxer == null) throw new NullPointerException("MediaMuxerWrapper is null");
        mWeakMuxer = new WeakReference<MediaMuxerWrapper>(muxer);
        muxer.addEncoder(this);
        mListener = listener;
        synchronized (mSync) {
            // create BufferInfo here for effectiveness(to reduce GC)
            mBufferInfo = new MediaCodec.BufferInfo();
            // wait for starting thread
            new Thread(this, getClass().getSimpleName()).start();
            try {
                mSync.wait();
            } catch (final InterruptedException e) {
                // NOTE(review): interrupt swallowed; construction proceeds regardless.
            }
        }
    }

    /** @return the muxer's output file path, or null if the muxer was collected. */
    public String getOutputPath() {
        final MediaMuxerWrapper muxer = mWeakMuxer.get();
        return muxer != null ? muxer.getOutputPath() : null;
    }

    /**
     * the method to indicate frame data is soon available or already available
     * @return return true if encoder is ready to encod.
     */
    public boolean frameAvailableSoon() {
        if (DEBUG) Log.v(TAG, "frameAvailableSoon");
        synchronized (mSync) {
            if (!mIsCapturing || mRequestStop) {
                return false;
            }
            mRequestDrain++;
            mSync.notifyAll();
        }
        return true;
    }

    /**
     * encoding loop on private thread
     */
    @Override
    public void run() {
        // android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
        synchronized (mSync) {
            mRequestStop = false;
            mRequestDrain = 0;
            // Wake the constructor, which is waiting for the thread to start.
            mSync.notify();
        }
        final boolean isRunning = true;
        boolean localRequestStop;
        boolean localRequestDrain;
        while (isRunning) {
            // Snapshot the shared flags under the lock, then act outside it.
            synchronized (mSync) {
                localRequestStop = mRequestStop;
                localRequestDrain = (mRequestDrain > 0);
                if (localRequestDrain)
                    mRequestDrain--;
            }
            if (localRequestStop) {
                drain();
                // request stop recording
                signalEndOfInputStream();
                // process output data again for EOS signale
                drain();
                // release all related objects
                release();
                break;
            }
            if (localRequestDrain) {
                drain();
            } else {
                // Nothing to do: sleep until frameAvailableSoon()/stopRecording() notifies.
                synchronized (mSync) {
                    try {
                        mSync.wait();
                    } catch (final InterruptedException e) {
                        break;
                    }
                }
            }
        } // end of while
        if (DEBUG) Log.d(TAG, "Encoder thread exiting");
        synchronized (mSync) {
            mRequestStop = true;
            mIsCapturing = false;
        }
    }

    /*
     * prepareing method for each sub class
     * this method should be implemented in sub class, so set this as abstract method
     * @throws IOException
     */
    /*package*/ abstract void prepare() throws IOException;

    /** Mark the encoder as capturing and wake the encoder thread. */
    /*package*/ void startRecording() {
        if (DEBUG) Log.v(TAG, "startRecording");
        synchronized (mSync) {
            mIsCapturing = true;
            mRequestStop = false;
            mSync.notifyAll();
        }
    }

    /**
     * the method to request stop encoding
     */
    /*package*/ void stopRecording() {
        if (DEBUG) Log.v(TAG, "stopRecording");
        synchronized (mSync) {
            if (!mIsCapturing || mRequestStop) {
                return;
            }
            mRequestStop = true; // for rejecting newer frame
            mSync.notifyAll();
            // We can not know when the encoding and writing finish.
            // so we return immediately after request to avoid delay of caller thread
        }
    }

    //********************************************************************************
    //********************************************************************************
    /**
     * Release all releated objects
     */
    protected void release() {
        if (DEBUG) Log.d(TAG, "release:");
        try {
            mListener.onStopped(this);
        } catch (final Exception e) {
            Log.e(TAG, "failed onStopped", e);
        }
        mIsCapturing = false;
        if (mMediaCodec != null) {
            try {
                mMediaCodec.stop();
                mMediaCodec.release();
                mMediaCodec = null;
            } catch (final Exception e) {
                Log.e(TAG, "failed releasing MediaCodec", e);
            }
        }
        if (mMuxerStarted) {
            final MediaMuxerWrapper muxer = mWeakMuxer != null ? mWeakMuxer.get() : null;
            if (muxer != null) {
                try {
                    muxer.stop();
                } catch (final Exception e) {
                    Log.e(TAG, "failed stopping muxer", e);
                }
            }
        }
        mBufferInfo = null;
    }

    /** Send EOS to the encoder (via an empty buffer with the EOS flag). */
    protected void signalEndOfInputStream() {
        if (DEBUG) Log.d(TAG, "sending EOS to encoder");
        // signalEndOfInputStream is only avairable for video encoding with surface
        // and equivalent sending a empty buffer with BUFFER_FLAG_END_OF_STREAM flag.
        // mMediaCodec.signalEndOfInputStream(); // API >= 18
        encode(null, 0, getPTSUs());
    }

    /**
     * Method to set byte array to the MediaCodec encoder
     * @param buffer
     * @param length length of byte array, zero means EOS.
     * @param presentationTimeUs
     */
    protected void encode(final ByteBuffer buffer, final int length, final long presentationTimeUs) {
        if (!mIsCapturing) return;
        final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
        while (mIsCapturing) {
            final int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
            Log.e(TAG, "inputBufferIndex: "+inputBufferIndex );
            if (inputBufferIndex >= 0) {
                final ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                if (buffer != null) {
                    inputBuffer.put(buffer);
                }
                if (DEBUG) Log.v(TAG, "encode:queueInputBuffer");
                if (length <= 0) {
                    // send EOS
                    mIsEOS = true;
                    if (DEBUG) Log.i(TAG, "send BUFFER_FLAG_END_OF_STREAM");
                    mMediaCodec.queueInputBuffer(inputBufferIndex, 0, 0,
                            presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    break;
                } else {
                    mMediaCodec.queueInputBuffer(inputBufferIndex, 0, length,
                            presentationTimeUs, 0);
                }
                break;
            } else if (inputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // wait for MediaCodec encoder is ready to encode
                // nothing to do here because MediaCodec#dequeueInputBuffer(TIMEOUT_USEC)
                // will wait for maximum TIMEOUT_USEC(10msec) on each call
            }
        }
    }

    /**
     * drain encoded data and write them to muxer
     */
    protected void drain() {
        if (mMediaCodec == null) return;
        ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers();
        Log.e(TAG, "encoderOutputBuffers: "+encoderOutputBuffers.length );
        int encoderStatus, count = 0;
        final MediaMuxerWrapper muxer = mWeakMuxer.get();
        if (muxer == null) {
            // throw new NullPointerException("muxer is unexpectedly null");
            Log.w(TAG, "muxer is unexpectedly null");
            return;
        }
        Log.e(TAG, "mIsCapturing: "+mIsCapturing );
        LOOP: while (mIsCapturing) {
            // get encoded data with maximum timeout duration of TIMEOUT_USEC(=10[msec])
            encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            Log.e(TAG, "encoderStatus: "+encoderStatus );
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // wait 5 counts(=TIMEOUT_USEC x 5 = 50msec) until data/EOS come
                if (!mIsEOS) {
                    if (++count > 5)
                        break LOOP; // out of while
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                if (DEBUG) Log.v(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
                // this shoud not come when encoding
                encoderOutputBuffers = mMediaCodec.getOutputBuffers();
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (DEBUG) Log.v(TAG, "INFO_OUTPUT_FORMAT_CHANGED");
                // this status indicate the output format of codec is changed
                // this should come only once before actual encoded data
                // but this status never come on Android4.3 or less
                // and in that case, you should treat when MediaCodec.BUFFER_FLAG_CODEC_CONFIG come.
                if (mMuxerStarted) { // second time request is error
                    throw new RuntimeException("format changed twice");
                }
                // get output format from codec and pass them to muxer
                // getOutputFormat should be called after INFO_OUTPUT_FORMAT_CHANGED otherwise crash.
                final MediaFormat format = mMediaCodec.getOutputFormat(); // API >= 16
                mTrackIndex = muxer.addTrack(format);
                mMuxerStarted = true;
                if (!muxer.start()) {
                    // we should wait until muxer is ready
                    synchronized (muxer) {
                        while (!muxer.isStarted())
                            try {
                                muxer.wait(100);
                            } catch (final InterruptedException e) {
                                break LOOP;
                            }
                    }
                }
            } else if (encoderStatus < 0) {
                // unexpected status
                if (DEBUG) Log.w(TAG, "drain:unexpected result from encoder#dequeueOutputBuffer: " + encoderStatus);
            } else {
                final ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                    // this never should come...may be a MediaCodec internal error
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
                }
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // You shoud set output format to muxer here when you target Android4.3 or less
                    // but MediaCodec#getOutputFormat can not call here(because INFO_OUTPUT_FORMAT_CHANGED don't come yet)
                    // therefor we should expand and prepare output format from buffer data.
                    // This sample is for API>=18(>=Android 4.3), just ignore this flag here
                    if (DEBUG) Log.d(TAG, "drain:BUFFER_FLAG_CODEC_CONFIG");
                    mBufferInfo.size = 0;
                }
                if (mBufferInfo.size != 0) {
                    // encoded data is ready, clear waiting counter
                    count = 0;
                    if (!mMuxerStarted) {
                        // muxer is not ready...this will prrograming failure.
                        throw new RuntimeException("drain:muxer hasn't started");
                    }
                    // write encoded data to muxer(need to adjust presentationTimeUs.
                    mBufferInfo.presentationTimeUs = getPTSUs();
                    muxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                    prevOutputPTSUs = mBufferInfo.presentationTimeUs;
                }
                // return buffer to encoder
                mMediaCodec.releaseOutputBuffer(encoderStatus, false);
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    // when EOS come.
                    mIsCapturing = false;
                    break; // out of while
                }
            }
        }
    }

    /**
     * previous presentationTimeUs for writing
     */
    private long prevOutputPTSUs = 0;

    /**
     * get next encoding presentationTimeUs
     * @return a monotonically non-decreasing timestamp in microseconds
     */
    protected long getPTSUs() {
        long result = System.nanoTime() / 1000L;
        // presentationTimeUs should be monotonic
        // otherwise muxer fail to write
        if (result < prevOutputPTSUs)
            result = (prevOutputPTSUs - result) + result;
        return result;
    }
}

View File

@@ -0,0 +1,232 @@
package me.lake.librestreaming.encoder;
import android.content.Context;
import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Environment;
import android.text.TextUtils;
import android.util.Log;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.GregorianCalendar;
import java.util.Locale;
/**
 * Wraps {@link MediaMuxer} so that one video and one audio
 * {@link MediaEncoder} can share a single MP4 output file. The muxer is
 * started only after every registered encoder has called {@link #start()},
 * and stopped when the last one calls {@link #stop()}.
 */
public class MediaMuxerWrapper {
    private static final boolean DEBUG = false; // TODO set false on release
    private static final String TAG = "MediaMuxerWrapper";

    private static final String DIR_NAME = "WSLive";
    private static final SimpleDateFormat mDateTimeFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss", Locale.US);

    private String mOutputPath;
    private final MediaMuxer mMediaMuxer; // API >= 18
    // mStatredCount tracks how many encoders have called start()/stop().
    private int mEncoderCount, mStatredCount;
    private boolean mIsStarted;
    private MediaEncoder mVideoEncoder, mAudioEncoder;

    /**
     * Constructor
     * @param ext extension of output file (defaults to ".mp4" when empty)
     * @throws IOException if the muxer cannot be created
     */
    public MediaMuxerWrapper(String ext) throws IOException {
        if (TextUtils.isEmpty(ext)) ext = ".mp4";
        try {
            mOutputPath = getCaptureFile(Environment.DIRECTORY_MOVIES, ext).toString();
            //mOutputPath =newTmpDir("Movies");/* getCaptureFile(Environment.DIRECTORY_MOVIES, ext).toString();*/
        } catch (final NullPointerException e) {
            // getCaptureFile returns null when the directory is not writable.
            throw new RuntimeException("This app has no permission of writing external storage");
        }
        mMediaMuxer = new MediaMuxer(mOutputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        mEncoderCount = mStatredCount = 0;
        mIsStarted = false;
    }

    public static final String ROOT_DIR = "video";
    private static final String DIR_TMP = "tmp";
    private static Context mContext;

    /** Supply an application Context for cache-dir based output paths. */
    public static void setContext(Context context){
        mContext = context;
    }

    /**
     * 新建tmp目录,tmp/xxx/
     * Create a tmp sub-directory and return a timestamped .mp4 path inside it.
     *
     * @param dirName sub-directory name under .../video/tmp/
     * @return absolute path of a new timestamped .mp4 file in that directory
     */
    public static String newTmpDir(String dirName) {
        File tmpDir = new File(getStorageRoot(mContext, ROOT_DIR, true), DIR_TMP);
        if (!tmpDir.exists() || !tmpDir.isDirectory()) {
            tmpDir.mkdirs();
        }
        File dir = new File(tmpDir, dirName);
        if (!dir.exists() || !dir.isDirectory()) {
            dir.mkdirs();
        }
        // BUGFIX: the original concatenated the directory path and the file name
        // without a path separator ("/path/tmpMovies2022-..."); build the child
        // file properly instead.
        return new File(dir, getDateTimeString() + ".mp4").getAbsolutePath();
    }

    /**
     * 获取缓存root路径
     * Resolve the cache root directory.
     *
     * @param context application context
     * @param dirName sub-directory created under the cache dir
     * @param isExternFirst 是否外存优先 (prefer external storage when mounted)
     * @return the (created) cache directory
     */
    public static File getStorageRoot(Context context, String dirName, boolean isExternFirst) {
        File cacheDir = null;
        if ((Environment.MEDIA_MOUNTED.equals(Environment.getExternalStorageState())
                || !Environment.isExternalStorageRemovable()) && isExternFirst) {
            cacheDir = context.getExternalCacheDir();
        } else {
            cacheDir = context.getCacheDir();
        }
        File dir = new File(cacheDir, dirName);
        if (!dir.exists() || !dir.isDirectory()) {
            dir.mkdirs();
        }
        return dir;
    }

    /** @return the absolute path of the output file. */
    public String getOutputPath() {
        return mOutputPath;
    }

    /** Prepare all registered encoders. */
    public void prepare() throws IOException {
        if (mVideoEncoder != null)
            mVideoEncoder.prepare();
        if (mAudioEncoder != null)
            mAudioEncoder.prepare();
    }

    /** Start all registered encoders. */
    public void startRecording() {
        if (mVideoEncoder != null)
            mVideoEncoder.startRecording();
        if (mAudioEncoder != null)
            mAudioEncoder.startRecording();
    }

    /** Request all registered encoders to stop; references are dropped immediately. */
    public void stopRecording() {
        if (mVideoEncoder != null)
            mVideoEncoder.stopRecording();
        mVideoEncoder = null;
        if (mAudioEncoder != null)
            mAudioEncoder.stopRecording();
        mAudioEncoder = null;
    }

    public synchronized boolean isStarted() {
        return mIsStarted;
    }

    //**********************************************************************
    //**********************************************************************
    /**
     * assign encoder to this calss. this is called from encoder.
     * @param encoder instance of MediaVideoEncoder or MediaAudioEncoder
     */
    /*package*/ void addEncoder(final MediaEncoder encoder) {
        if (encoder instanceof MediaVideoEncoder) {
            if (mVideoEncoder != null)
                throw new IllegalArgumentException("Video encoder already added.");
            mVideoEncoder = encoder;
        } else if (encoder instanceof MediaAudioEncoder) {
            if (mAudioEncoder != null)
                // BUGFIX: message previously said "Video encoder already added."
                throw new IllegalArgumentException("Audio encoder already added.");
            mAudioEncoder = encoder;
        } else
            throw new IllegalArgumentException("unsupported encoder");
        mEncoderCount = (mVideoEncoder != null ? 1 : 0) + (mAudioEncoder != null ? 1 : 0);
    }

    /**
     * request start recording from encoder
     * @return true when muxer is ready to write
     */
    /*package*/ synchronized boolean start() {
        if (DEBUG) Log.v(TAG, "start:");
        mStatredCount++;
        // Only start the muxer once ALL registered encoders are ready.
        if ((mEncoderCount > 0) && (mStatredCount == mEncoderCount)) {
            mMediaMuxer.start();
            mIsStarted = true;
            notifyAll();
            if (DEBUG) Log.v(TAG, "MediaMuxer started:");
        }
        return mIsStarted;
    }

    /**
     * request stop recording from encoder when encoder received EOS
     */
    /*package*/ synchronized void stop() {
        if (DEBUG) Log.v(TAG, "stop:mStatredCount=" + mStatredCount);
        mStatredCount--;
        // Stop/release the muxer only after the last encoder has stopped.
        if ((mEncoderCount > 0) && (mStatredCount <= 0)) {
            mMediaMuxer.stop();
            mMediaMuxer.release();
            mIsStarted = false;
            if (DEBUG) Log.v(TAG, "MediaMuxer stopped:");
        }
    }

    /**
     * assign encoder to muxer
     * @param format output format of the encoder
     * @return track index; minus value indicate error
     */
    /*package*/ synchronized int addTrack(final MediaFormat format) {
        if (mIsStarted)
            throw new IllegalStateException("muxer already started");
        final int trackIx = mMediaMuxer.addTrack(format);
        if (DEBUG) Log.i(TAG, "addTrack:trackNum=" + mEncoderCount + ",trackIx=" + trackIx + ",format=" + format);
        return trackIx;
    }

    /**
     * write encoded data to muxer
     * @param trackIndex track returned by addTrack
     * @param byteBuf encoded sample data
     * @param bufferInfo buffer metadata (offset/size/pts/flags)
     */
    /*package*/ synchronized void writeSampleData(final int trackIndex, final ByteBuffer byteBuf, final MediaCodec.BufferInfo bufferInfo) {
        // Drop samples arriving after all encoders have stopped.
        if (mStatredCount > 0)
            mMediaMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
    }

    //**********************************************************************
    //**********************************************************************
    /**
     * generate output file
     * @param type Environment.DIRECTORY_MOVIES / Environment.DIRECTORY_DCIM etc.
     * @param ext .mp4(.m4a for audio) or .png
     * @return return null when this app has no writing permission to external storage.
     */
    public static final File getCaptureFile(final String type, final String ext) {
        final File dir = new File(Environment.getExternalStoragePublicDirectory(type), DIR_NAME);
        Log.d(TAG, "path=" + dir.toString());
        dir.mkdirs();
        if (dir.canWrite()) {
            return new File(dir, getDateTimeString() + ext);
        }
        return null;
    }

    /**
     * get current date and time as String
     * @return timestamp formatted as yyyy-MM-dd-HH-mm-ss
     */
    private static final String getDateTimeString() {
        final GregorianCalendar now = new GregorianCalendar();
        return mDateTimeFormat.format(now.getTime());
    }

    /** @return the absolute path of the output file (alias of getOutputPath). */
    public String getFilePath(){
        return mOutputPath;
    }
}

View File

@@ -0,0 +1,251 @@
package me.lake.librestreaming.encoder;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.opengl.EGLContext;
import android.opengl.Matrix;
import android.util.Log;
import android.view.Surface;
import java.io.IOException;
import me.lake.librestreaming.encoder.utils.RenderHandler;
public class MediaVideoEncoder extends MediaEncoder {
private static final boolean DEBUG = false; // TODO set false on release
private static final String TAG = "MediaVideoEncoder";
private static final String MIME_TYPE = "video/avc";
// parameters for recording
private static final int FRAME_RATE = 24;
private static final float BPP = 0.25f;
private final int mWidth;
private final int mHeight;
private RenderHandler mRenderHandler;
private Surface mSurface;
private int previewW, previewH; //预览宽高
private float[] mvpMatrix = new float[]{
1, 0, 0, 0,
0, 1, 0, 0,
0, 0, 1, 0,
0, 0, 0, 1
};
private boolean isMatrixCalc = false;
/**
 * Create a video encoder producing an H.264 track of the given size.
 *
 * @param muxer    muxer the encoded track is written to
 * @param listener lifecycle callbacks
 * @param width    frame width in pixels
 * @param height   frame height in pixels
 */
public MediaVideoEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener, final int width, final int height) {
    super(muxer, listener);
    if (DEBUG) Log.i(TAG, "MediaVideoEncoder: ");
    mWidth = width;
    mHeight = height;
    // Dedicated GL render thread that draws frames into the encoder surface.
    mRenderHandler = RenderHandler.createHandler(TAG);
}
/**
 * Request a drain and, when the encoder accepts it, render the frame into
 * the encoder input surface using the given texture transform matrix.
 *
 * @return true if the encoder accepted the frame request
 */
public boolean frameAvailableSoon(final float[] tex_matrix) {
    final boolean accepted = super.frameAvailableSoon();
    if (accepted) {
        mRenderHandler.draw(tex_matrix);
    }
    return accepted;
}
/**
 * Request a drain and, when the encoder accepts it, render the frame with
 * both a texture transform and a model-view-projection matrix.
 *
 * @return true if the encoder accepted the frame request
 */
public boolean frameAvailableSoon(final float[] tex_matrix, final float[] mvp_matrix) {
    final boolean accepted = super.frameAvailableSoon();
    if (accepted) {
        mRenderHandler.draw(tex_matrix, mvp_matrix);
    }
    return accepted;
}
/**
 * Request a drain and, when accepted, render with the identity/default
 * texture matrix.
 *
 * @return true if the encoder accepted the frame request
 */
@Override
public boolean frameAvailableSoon() {
    final boolean accepted = super.frameAvailableSoon();
    if (accepted) {
        mRenderHandler.draw(null);
    }
    return accepted;
}
/**
 * Configure and start the H.264 surface-input encoder, then obtain the
 * input Surface frames will be rendered into. Returns early (without
 * throwing) if no suitable codec is found.
 */
@Override
protected void prepare() throws IOException {
    if (DEBUG) Log.i(TAG, "prepare: ");
    mTrackIndex = -1;
    mMuxerStarted = mIsEOS = false;
    final MediaCodecInfo videoCodecInfo = selectVideoCodec(MIME_TYPE);
    if (videoCodecInfo == null) {
        Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
        return;
    }
    if (DEBUG) Log.i(TAG, "selected codec: " + videoCodecInfo.getName());
    final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
    // Surface input: the encoder consumes frames drawn via GL, not byte buffers.
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); // API >= 18
    format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
    format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 6); // key frame every 6 seconds
    if (DEBUG) Log.i(TAG, "format: " + format);
    mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
    mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    // get Surface for encoder input
    // this method only can call between #configure and #start
    mSurface = mMediaCodec.createInputSurface(); // API >= 18
    mMediaCodec.start();
    if (DEBUG) Log.i(TAG, "prepare finishing");
    if (mListener != null) {
        try {
            mListener.onPrepared(this);
        } catch (final Exception e) {
            Log.e(TAG, "prepare:", e);
        }
    }
}
public void setEglContext(final EGLContext shared_context, final int tex_id) {
mRenderHandler.setEglContext(shared_context, tex_id, mSurface, true);
}
@Override
protected void release() {
if (DEBUG) Log.i(TAG, "release:");
if (mSurface != null) {
mSurface.release();
mSurface = null;
}
if (mRenderHandler != null) {
mRenderHandler.release();
mRenderHandler = null;
}
super.release();
}
private int calcBitRate() {
final int bitrate = (int) (BPP * FRAME_RATE * mWidth * mHeight);
Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f));
return bitrate;
}
/**
* select the first codec that match a specific MIME type
*
* @param mimeType
* @return null if no codec matched
*/
protected static final MediaCodecInfo selectVideoCodec(final String mimeType) {
if (DEBUG) Log.v(TAG, "selectVideoCodec:");
// get the list of available codecs
final int numCodecs = MediaCodecList.getCodecCount();
for (int i = 0; i < numCodecs; i++) {
final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
if (!codecInfo.isEncoder()) { // skipp decoder
continue;
}
// select first codec that match a specific MIME type and color format
final String[] types = codecInfo.getSupportedTypes();
for (int j = 0; j < types.length; j++) {
if (types[j].equalsIgnoreCase(mimeType)) {
if (DEBUG) Log.i(TAG, "codec:" + codecInfo.getName() + ",MIME=" + types[j]);
final int format = selectColorFormat(codecInfo, mimeType);
if (format > 0) {
return codecInfo;
}
}
}
}
return null;
}
/**
* select color format available on specific codec and we can use.
*
* @return 0 if no colorFormat is matched
*/
protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
if (DEBUG) Log.i(TAG, "selectColorFormat: ");
int result = 0;
final MediaCodecInfo.CodecCapabilities caps;
try {
Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
caps = codecInfo.getCapabilitiesForType(mimeType);
} finally {
Thread.currentThread().setPriority(Thread.NORM_PRIORITY);
}
int colorFormat;
for (int i = 0; i < caps.colorFormats.length; i++) {
colorFormat = caps.colorFormats[i];
if (isRecognizedViewoFormat(colorFormat)) {
if (result == 0)
result = colorFormat;
break;
}
}
if (result == 0)
Log.e(TAG, "couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
return result;
}
    /**
     * color formats that we can use in this class.
     * Only Surface input is supported; the raw YUV formats below are
     * intentionally commented out.
     */
    protected static int[] recognizedFormats;
    static {
        recognizedFormats = new int[]{
                // MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
                // MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
                // MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface,
        };
    }
private static final boolean isRecognizedViewoFormat(final int colorFormat) {
if (DEBUG) Log.i(TAG, "isRecognizedViewoFormat:colorFormat=" + colorFormat);
final int n = recognizedFormats != null ? recognizedFormats.length : 0;
for (int i = 0; i < n; i++) {
if (recognizedFormats[i] == colorFormat) {
return true;
}
}
return false;
}
    /**
     * Signals end-of-stream to the encoder. Surface-input encoders cannot
     * receive EOS through an input buffer, so the dedicated API is used.
     */
    @Override
    protected void signalEndOfInputStream() {
        if (DEBUG) Log.d(TAG, "sending EOS to encoder");
        mMediaCodec.signalEndOfInputStream(); // API >= 18
        mIsEOS = true;
    }
    /**
     * Stores the camera preview size; {@link #getMvpMatrix()} uses it to
     * aspect-correct the preview into the encoder's frame.
     */
    public void setPreviewWH(int previewW, int previewH) {
        this.previewW = previewW;
        this.previewH = previewH;
    }
    /**
     * Lazily computes the model/view/projection matrix that fits the preview
     * frame into the encoder's aspect ratio by shrinking one axis of the
     * orthographic volume (letterbox/pillarbox style).
     *
     * @return the 4x4 MVP matrix, or null if the preview size is not yet known
     */
    public float[] getMvpMatrix() {
        if (previewW < 1 || previewH < 1) return null;
        if (isMatrixCalc) return mvpMatrix; // cached until preview size changes the flag
        float encodeWHRatio = mWidth * 1.0f / mHeight;
        float previewWHRatio = previewW * 1.0f / previewH;
        float[] projection = new float[16];
        float[] camera = new float[16];
        if (encodeWHRatio > previewWHRatio) {
            // encoder frame is wider than the preview: shrink the vertical extent
            Matrix.orthoM(projection, 0, -1, 1, -previewWHRatio / encodeWHRatio, previewWHRatio / encodeWHRatio, 1, 3);
        } else {
            // encoder frame is taller (or equal): shrink the horizontal extent
            Matrix.orthoM(projection, 0, -encodeWHRatio / previewWHRatio, encodeWHRatio / previewWHRatio, -1, 1, 1, 3);
        }
        // Camera at z=1 looking at the origin, up = +Y.
        Matrix.setLookAtM(camera, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0);
        Matrix.multiplyMM(mvpMatrix, 0, projection, 0, camera, 0);
        isMatrixCalc = true;
        return mvpMatrix;
    }
}

View File

@@ -0,0 +1,324 @@
package me.lake.librestreaming.encoder.utils;
import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.os.Build;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/**
 * Thin wrapper around EGL14 that owns one EGLDisplay/EGLConfig/EGLContext set
 * and creates window or offscreen (pbuffer) surfaces bound to that context.
 * Not thread-safe; intended to be driven from a single GL thread.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public class EGLBase { // API >= 17
    private static final boolean DEBUG = false; // TODO set false on release
    private static final String TAG = "EGLBase";
    // EGL_RECORDABLE_ANDROID is not exposed by EGL14; marks a config usable as MediaCodec input.
    private static final int EGL_RECORDABLE_ANDROID = 0x3142;
    private EGLConfig mEglConfig = null;
    private EGLContext mEglContext = EGL14.EGL_NO_CONTEXT;
    private EGLDisplay mEglDisplay = EGL14.EGL_NO_DISPLAY;
    private EGLContext mDefaultContext = EGL14.EGL_NO_CONTEXT;

    /**
     * An EGL surface (window or pbuffer) tied to the EGLBase that created it.
     */
    public static class EglSurface {
        private final EGLBase mEgl;
        private EGLSurface mEglSurface = EGL14.EGL_NO_SURFACE;
        private final int mWidth, mHeight;

        // Wraps a native window (Surface / SurfaceView / SurfaceHolder / SurfaceTexture).
        EglSurface(final EGLBase egl, final Object surface) {
            if (DEBUG) Log.v(TAG, "EglSurface:");
            if (!(surface instanceof SurfaceView)
                    && !(surface instanceof Surface)
                    && !(surface instanceof SurfaceHolder)
                    && !(surface instanceof SurfaceTexture))
                throw new IllegalArgumentException("unsupported surface");
            mEgl = egl;
            mEglSurface = mEgl.createWindowSurface(surface);
            // Actual size is whatever EGL reports for the window.
            mWidth = mEgl.querySurface(mEglSurface, EGL14.EGL_WIDTH);
            mHeight = mEgl.querySurface(mEglSurface, EGL14.EGL_HEIGHT);
            if (DEBUG) Log.v(TAG, String.format("EglSurface:size(%d,%d)", mWidth, mHeight));
        }

        // Creates an offscreen pbuffer surface of the requested size.
        EglSurface(final EGLBase egl, final int width, final int height) {
            if (DEBUG) Log.v(TAG, "EglSurface:");
            mEgl = egl;
            mEglSurface = mEgl.createOffscreenSurface(width, height);
            mWidth = width;
            mHeight = height;
        }

        /** Binds this surface and the owning context to the calling thread. */
        public void makeCurrent() {
            mEgl.makeCurrent(mEglSurface);
        }

        /** Posts the back buffer (eglSwapBuffers). */
        public void swap() {
            mEgl.swap(mEglSurface);
        }

        public EGLContext getContext() {
            return mEgl.getContext();
        }

        /** Detaches the context from the thread, then destroys this surface. */
        public void release() {
            if (DEBUG) Log.v(TAG, "EglSurface:release:");
            mEgl.makeDefault();
            mEgl.destroyWindowSurface(mEglSurface);
            mEglSurface = EGL14.EGL_NO_SURFACE;
        }

        public int getWidth() {
            return mWidth;
        }

        public int getHeight() {
            return mHeight;
        }
    }

    /**
     * @param shared_context    context to share GL objects with, or null for none
     * @param with_depth_buffer request a 16-bit depth buffer in the chosen config
     * @param isRecordable      request an EGL_RECORDABLE_ANDROID config (MediaCodec input)
     */
    public EGLBase(final EGLContext shared_context, final boolean with_depth_buffer, final boolean isRecordable) {
        if (DEBUG) Log.v(TAG, "EGLBase:");
        init(shared_context, with_depth_buffer, isRecordable);
    }

    /** Destroys the context and terminates the display connection. */
    public void release() {
        if (DEBUG) Log.v(TAG, "release:");
        if (mEglDisplay != EGL14.EGL_NO_DISPLAY) {
            destroyContext();
            EGL14.eglTerminate(mEglDisplay);
            EGL14.eglReleaseThread();
        }
        mEglDisplay = EGL14.EGL_NO_DISPLAY;
        mEglContext = EGL14.EGL_NO_CONTEXT;
    }

    /** Creates a window surface for the given native window and makes it current. */
    public EglSurface createFromSurface(final Object surface) {
        if (DEBUG) Log.v(TAG, "createFromSurface:");
        final EglSurface eglSurface = new EglSurface(this, surface);
        eglSurface.makeCurrent();
        return eglSurface;
    }

    /** Creates an offscreen pbuffer surface and makes it current. */
    public EglSurface createOffscreen(final int width, final int height) {
        if (DEBUG) Log.v(TAG, "createOffscreen:");
        final EglSurface eglSurface = new EglSurface(this, width, height);
        eglSurface.makeCurrent();
        return eglSurface;
    }

    public EGLContext getContext() {
        return mEglContext;
    }

    /** Queries an integer surface attribute such as EGL_WIDTH/EGL_HEIGHT. */
    public int querySurface(final EGLSurface eglSurface, final int what) {
        final int[] value = new int[1];
        EGL14.eglQuerySurface(mEglDisplay, eglSurface, what, value, 0);
        return value[0];
    }

    // One-time setup: obtain the display, pick a config and create the context.
    private void init(EGLContext shared_context, final boolean with_depth_buffer, final boolean isRecordable) {
        if (DEBUG) Log.v(TAG, "init:");
        if (mEglDisplay != EGL14.EGL_NO_DISPLAY) {
            throw new RuntimeException("EGL already set up");
        }
        mEglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        if (mEglDisplay == EGL14.EGL_NO_DISPLAY) {
            throw new RuntimeException("eglGetDisplay failed");
        }
        final int[] version = new int[2];
        if (!EGL14.eglInitialize(mEglDisplay, version, 0, version, 1)) {
            // NOTE(review): sets null rather than EGL_NO_DISPLAY; makeCurrent() checks for null accordingly.
            mEglDisplay = null;
            throw new RuntimeException("eglInitialize failed");
        }
        shared_context = shared_context != null ? shared_context : EGL14.EGL_NO_CONTEXT;
        if (mEglContext == EGL14.EGL_NO_CONTEXT) {
            mEglConfig = getConfig(with_depth_buffer, isRecordable);
            if (mEglConfig == null) {
                throw new RuntimeException("chooseConfig failed");
            }
            // create EGL rendering context
            mEglContext = createContext(shared_context);
        }
        // confirm whether the EGL rendering context is successfully created
        final int[] values = new int[1];
        EGL14.eglQueryContext(mEglDisplay, mEglContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, values, 0);
        if (DEBUG) Log.d(TAG, "EGLContext created, client version " + values[0]);
        makeDefault(); // makeCurrent(EGL14.EGL_NO_SURFACE);
    }

    /**
     * change context to draw this window surface
     * @return true if the surface/context pair became current on this thread
     */
    private boolean makeCurrent(final EGLSurface surface) {
        // if (DEBUG) Log.v(TAG, "makeCurrent:");
        if (mEglDisplay == null) {
            if (DEBUG) Log.d(TAG, "makeCurrent:eglDisplay not initialized");
        }
        if (surface == null || surface == EGL14.EGL_NO_SURFACE) {
            final int error = EGL14.eglGetError();
            if (error == EGL14.EGL_BAD_NATIVE_WINDOW) {
                Log.e(TAG, "makeCurrent:returned EGL_BAD_NATIVE_WINDOW.");
            }
            return false;
        }
        // attach EGL renderring context to specific EGL window surface
        if (!EGL14.eglMakeCurrent(mEglDisplay, surface, surface, mEglContext)) {
            Log.w(TAG, "eglMakeCurrent:" + EGL14.eglGetError());
            return false;
        }
        return true;
    }

    // Detaches any surface and context from the calling thread.
    private void makeDefault() {
        if (DEBUG) Log.v(TAG, "makeDefault:");
        if (!EGL14.eglMakeCurrent(mEglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
            // NOTE(review): logs under the literal tag "TAG", not the TAG constant — confirm intent.
            Log.w("TAG", "makeDefault" + EGL14.eglGetError());
        }
    }

    // Swaps buffers; returns EGL_SUCCESS or the EGL error code on failure.
    private int swap(final EGLSurface surface) {
        // if (DEBUG) Log.v(TAG, "swap:");
        if (!EGL14.eglSwapBuffers(mEglDisplay, surface)) {
            final int err = EGL14.eglGetError();
            if (DEBUG) Log.w(TAG, "swap:err=" + err);
            return err;
        }
        return EGL14.EGL_SUCCESS;
    }

    // Creates an OpenGL ES 2.0 context, optionally shared with shared_context.
    private EGLContext createContext(final EGLContext shared_context) {
        // if (DEBUG) Log.v(TAG, "createContext:");
        final int[] attrib_list = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL14.EGL_NONE
        };
        final EGLContext context = EGL14.eglCreateContext(mEglDisplay, mEglConfig, shared_context, attrib_list, 0);
        checkEglError("eglCreateContext");
        return context;
    }

    // Destroys the main context and, if present, the default context.
    private void destroyContext() {
        if (DEBUG) Log.v(TAG, "destroyContext:");
        if (!EGL14.eglDestroyContext(mEglDisplay, mEglContext)) {
            Log.e("destroyContext", "display:" + mEglDisplay + " context: " + mEglContext);
            Log.e(TAG, "eglDestroyContex:" + EGL14.eglGetError());
        }
        mEglContext = EGL14.EGL_NO_CONTEXT;
        if (mDefaultContext != EGL14.EGL_NO_CONTEXT) {
            if (!EGL14.eglDestroyContext(mEglDisplay, mDefaultContext)) {
                Log.e("destroyContext", "display:" + mEglDisplay + " context: " + mDefaultContext);
                Log.e(TAG, "eglDestroyContex:" + EGL14.eglGetError());
            }
            mDefaultContext = EGL14.EGL_NO_CONTEXT;
        }
    }

    // Creates a window surface; returns null if the native window is rejected.
    private EGLSurface createWindowSurface(final Object nativeWindow) {
        if (DEBUG) Log.v(TAG, "createWindowSurface:nativeWindow=" + nativeWindow);
        final int[] surfaceAttribs = {
                EGL14.EGL_NONE
        };
        EGLSurface result = null;
        try {
            result = EGL14.eglCreateWindowSurface(mEglDisplay, mEglConfig, nativeWindow, surfaceAttribs, 0);
        } catch (final IllegalArgumentException e) {
            Log.e(TAG, "eglCreateWindowSurface", e);
        }
        return result;
    }

    /**
     * Creates an EGL surface associated with an offscreen buffer.
     */
    private EGLSurface createOffscreenSurface(final int width, final int height) {
        if (DEBUG) Log.v(TAG, "createOffscreenSurface:");
        final int[] surfaceAttribs = {
                EGL14.EGL_WIDTH, width,
                EGL14.EGL_HEIGHT, height,
                EGL14.EGL_NONE
        };
        EGLSurface result = null;
        try {
            result = EGL14.eglCreatePbufferSurface(mEglDisplay, mEglConfig, surfaceAttribs, 0);
            checkEglError("eglCreatePbufferSurface");
            if (result == null) {
                throw new RuntimeException("surface was null");
            }
        } catch (final IllegalArgumentException e) {
            Log.e(TAG, "createOffscreenSurface", e);
        } catch (final RuntimeException e) {
            Log.e(TAG, "createOffscreenSurface", e);
        }
        return result;
    }

    // Detaches the current context first, then destroys the surface.
    private void destroyWindowSurface(EGLSurface surface) {
        if (DEBUG) Log.v(TAG, "destroySurface:");
        if (surface != EGL14.EGL_NO_SURFACE) {
            EGL14.eglMakeCurrent(mEglDisplay,
                    EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT);
            EGL14.eglDestroySurface(mEglDisplay, surface);
        }
        surface = EGL14.EGL_NO_SURFACE;
        if (DEBUG) Log.v(TAG, "destroySurface:finished");
    }

    // Throws if the last EGL call left an error code set.
    private void checkEglError(final String msg) {
        int error;
        if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
            throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
        }
    }

    /**
     * Chooses an RGBA8888 / OpenGL ES2 config. Optional attributes (stencil,
     * depth, recordable) are patched into the placeholder slots of attribList
     * starting at index 10, and the remainder is re-terminated with EGL_NONE.
     */
    @SuppressWarnings("unused")
    private EGLConfig getConfig(final boolean with_depth_buffer, final boolean isRecordable) {
        final int[] attribList = {
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL14.EGL_RED_SIZE, 8,
                EGL14.EGL_GREEN_SIZE, 8,
                EGL14.EGL_BLUE_SIZE, 8,
                EGL14.EGL_ALPHA_SIZE, 8,
                EGL14.EGL_NONE, EGL14.EGL_NONE, //EGL14.EGL_STENCIL_SIZE, 8,
                EGL14.EGL_NONE, EGL14.EGL_NONE, //EGL_RECORDABLE_ANDROID, 1, // this flag need to recording of MediaCodec
                EGL14.EGL_NONE, EGL14.EGL_NONE, // with_depth_buffer ? EGL14.EGL_DEPTH_SIZE : EGL14.EGL_NONE,
                // with_depth_buffer ? 16 : 0,
                EGL14.EGL_NONE
        };
        int offset = 10;
        if (false) { // stencil buffer (never used)
            attribList[offset++] = EGL14.EGL_STENCIL_SIZE;
            attribList[offset++] = 8;
        }
        if (with_depth_buffer) { // depth buffer
            attribList[offset++] = EGL14.EGL_DEPTH_SIZE;
            attribList[offset++] = 16;
        }
        if (isRecordable && (Build.VERSION.SDK_INT >= 18)) {// for a MediaCodec input Surface
            attribList[offset++] = EGL_RECORDABLE_ANDROID;
            attribList[offset++] = 1;
        }
        // Overwrite the unused placeholder slots with EGL_NONE terminators.
        for (int i = attribList.length - 1; i >= offset; i--) {
            attribList[i] = EGL14.EGL_NONE;
        }
        final EGLConfig[] configs = new EGLConfig[1];
        final int[] numConfigs = new int[1];
        if (!EGL14.eglChooseConfig(mEglDisplay, attribList, 0, configs, 0, configs.length, numConfigs, 0)) {
            // XXX it will be better to fallback to RGB565
            Log.w(TAG, "unable to find RGBA8888 / " + " EGLConfig");
            return null;
        }
        return configs[0];
    }
}

View File

@@ -0,0 +1,189 @@
package me.lake.librestreaming.encoder.utils;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Log;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
/**
 * Helper class to draw to whole view using specific texture and texture matrix.
 * Owns its own shader program; construction and every method must run on a
 * thread with a current GL context.
 */
public class GLDrawer2D {
    private static final boolean DEBUG = false; // TODO set false on release
    private static final String TAG = "GLDrawer2D";
    // Vertex shader: MVP transform for positions, texture matrix for UVs.
    private static final String vss
            = "uniform mat4 uMVPMatrix;\n"
            + "uniform mat4 uTexMatrix;\n"
            + "attribute highp vec4 aPosition;\n"
            + "attribute highp vec4 aTextureCoord;\n"
            + "varying highp vec2 vTextureCoord;\n"
            + "\n"
            + "void main() {\n"
            + " gl_Position = uMVPMatrix * aPosition;\n"
            + " vTextureCoord = (uTexMatrix * aTextureCoord).xy;\n"
            + "}\n";
    // Fragment shader: plain 2D texture sampling.
    private static final String fss
            = "precision mediump float;\n"
            + "uniform sampler2D sTexture;\n"
            + "varying highp vec2 vTextureCoord;\n"
            + "void main() {\n"
            + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
            + "}";
    // Full-screen quad drawn as a triangle strip (x,y per vertex).
    private static final float[] VERTICES = { 1.0f, 1.0f, -1.0f, 1.0f, 1.0f, -1.0f, -1.0f, -1.0f };
    //private static final float[] TEXCOORD = { 1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 0.0f };
    // Vertically flipped texture coordinates compared to the commented variant above.
    private static final float[] TEXCOORD = { 1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f };
    private final FloatBuffer pVertex;
    private final FloatBuffer pTexCoord;
    private int hProgram; // linked shader program handle; -1 after release()
    int maPositionLoc;      // attribute location: aPosition
    int maTextureCoordLoc;  // attribute location: aTextureCoord
    int muMVPMatrixLoc;     // uniform location: uMVPMatrix
    int muTexMatrixLoc;     // uniform location: uTexMatrix
    private final float[] mMvpMatrix = new float[16];
    private static final int FLOAT_SZ = Float.SIZE / 8; // bytes per float
    private static final int VERTEX_NUM = 4;
    // 8 — float count of each buffer (4 vertices x 2 floats); also happens to equal
    // the byte stride passed to glVertexAttribPointer (2 floats x 4 bytes).
    private static final int VERTEX_SZ = VERTEX_NUM * 2;
    /**
     * Constructor
     * this should be called in GL context
     */
    public GLDrawer2D() {
        pVertex = ByteBuffer.allocateDirect(VERTEX_SZ * FLOAT_SZ)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
        pVertex.put(VERTICES);
        pVertex.flip();
        pTexCoord = ByteBuffer.allocateDirect(VERTEX_SZ * FLOAT_SZ)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
        pTexCoord.put(TEXCOORD);
        pTexCoord.flip();
        hProgram = loadShader(vss, fss);
        GLES20.glUseProgram(hProgram);
        maPositionLoc = GLES20.glGetAttribLocation(hProgram, "aPosition");
        maTextureCoordLoc = GLES20.glGetAttribLocation(hProgram, "aTextureCoord");
        muMVPMatrixLoc = GLES20.glGetUniformLocation(hProgram, "uMVPMatrix");
        muTexMatrixLoc = GLES20.glGetUniformLocation(hProgram, "uTexMatrix");
        // Start with identity for both matrices and bind the static vertex data once.
        Matrix.setIdentityM(mMvpMatrix, 0);
        GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mMvpMatrix, 0);
        GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, mMvpMatrix, 0);
        GLES20.glVertexAttribPointer(maPositionLoc, 2, GLES20.GL_FLOAT, false, VERTEX_SZ, pVertex);
        GLES20.glVertexAttribPointer(maTextureCoordLoc, 2, GLES20.GL_FLOAT, false, VERTEX_SZ, pTexCoord);
        GLES20.glEnableVertexAttribArray(maPositionLoc);
        GLES20.glEnableVertexAttribArray(maTextureCoordLoc);
    }
    /**
     * terminatinng, this should be called in GL context
     */
    public void release() {
        if (hProgram >= 0)
            GLES20.glDeleteProgram(hProgram);
        hProgram = -1;
    }
    /**
     * draw specific texture with specific texture matrix
     * @param tex_id texture ID
     * @param tex_matrix texture matrix、if this is null, the last one use(we don't check size of this array and needs at least 16 of float)
     */
    public void draw(final int tex_id, final float[] tex_matrix) {
        GLES20.glUseProgram(hProgram);
        if (tex_matrix != null)
            GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, tex_matrix, 0);
        GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mMvpMatrix, 0);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        // NOTE(review): binds GL_TEXTURE_2D here while initTex() creates an external
        // OES texture — confirm callers pass a 2D texture id to draw().
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex_id);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, VERTEX_NUM);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        GLES20.glUseProgram(0);
    }
    /**
     * Set model/view/projection transform matrix.
     * Falls back to identity when matrix is null or too short.
     * @param matrix source array, read as 16 floats starting at offset
     * @param offset start index into matrix
     */
    public void setMatrix(final float[] matrix, final int offset) {
        if ((matrix != null) && (matrix.length >= offset + 16)) {
            System.arraycopy(matrix, offset, mMvpMatrix, 0, 16);
        } else {
            Matrix.setIdentityM(mMvpMatrix, 0);
        }
    }
    /**
     * create external texture
     * @return texture ID
     */
    public static int initTex() {
        if (DEBUG) Log.v(TAG, "initTex:");
        final int[] tex = new int[1];
        GLES20.glGenTextures(1, tex, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
        return tex[0];
    }
    /**
     * delete specific texture
     */
    public static void deleteTex(final int hTex) {
        if (DEBUG) Log.v(TAG, "deleteTex:");
        final int[] tex = new int[] {hTex};
        GLES20.glDeleteTextures(1, tex, 0);
    }
    /**
     * load, compile and link shader
     * @param vss source of vertex shader
     * @param fss source of fragment shader
     * @return linked program handle (link status is not checked here)
     */
    public static int loadShader(final String vss, final String fss) {
        if (DEBUG) Log.v(TAG, "loadShader:");
        int vs = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
        GLES20.glShaderSource(vs, vss);
        GLES20.glCompileShader(vs);
        final int[] compiled = new int[1];
        GLES20.glGetShaderiv(vs, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            if (DEBUG) Log.e(TAG, "Failed to compile vertex shader:"
                    + GLES20.glGetShaderInfoLog(vs));
            GLES20.glDeleteShader(vs);
            vs = 0;
        }
        int fs = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
        GLES20.glShaderSource(fs, fss);
        GLES20.glCompileShader(fs);
        GLES20.glGetShaderiv(fs, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            if (DEBUG) Log.w(TAG, "Failed to compile fragment shader:"
                    + GLES20.glGetShaderInfoLog(fs));
            GLES20.glDeleteShader(fs);
            fs = 0;
        }
        final int program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, vs);
        GLES20.glAttachShader(program, fs);
        GLES20.glLinkProgram(program);
        return program;
    }
}

View File

@@ -0,0 +1,211 @@
package me.lake.librestreaming.encoder.utils;
import android.graphics.SurfaceTexture;
import android.opengl.EGLContext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.text.TextUtils;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
/**
 * Helper class to draw texture to whole view on private thread.
 * The public methods post requests under mSync; the thread created by
 * createHandler() consumes them in run(). release() shuts the thread down.
 */
public final class RenderHandler implements Runnable {
    private static final boolean DEBUG = false; // TODO set false on release
    private static final String TAG = "RenderHandler";
    private final Object mSync = new Object(); // guards all request state below
    private EGLContext mShard_context; // context shared with the caller's GL thread
    private boolean mIsRecordable;
    private Object mSurface; // target window; consumed (nulled) by internalPrepare()
    private int mTexId = -1;
    // 32 floats: [0..15] texture transform matrix, [16..31] MVP matrix.
    private float[] mMatrix = new float[32];
    private boolean mRequestSetEglContext;
    private boolean mRequestRelease;
    private int mRequestDraw; // count of pending draw requests
    /**
     * Starts a new render thread and blocks until it is ready to accept requests.
     */
    public static final RenderHandler createHandler(final String name) {
        if (DEBUG) Log.v(TAG, "createHandler:");
        final RenderHandler handler = new RenderHandler();
        synchronized (handler.mSync) {
            new Thread(handler, !TextUtils.isEmpty(name) ? name : TAG).start();
            try {
                // wait until run() signals startup
                handler.mSync.wait();
            } catch (final InterruptedException e) {
            }
        }
        return handler;
    }
    /**
     * Posts the EGL context, source texture and target window to the render
     * thread and blocks until internalPrepare() has consumed them.
     */
    public final void setEglContext(final EGLContext shared_context, final int tex_id, final Object surface, final boolean isRecordable) {
        if (DEBUG) Log.i(TAG, "setEglContext:");
        if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture) && !(surface instanceof SurfaceHolder))
            throw new RuntimeException("unsupported window type:" + surface);
        synchronized (mSync) {
            if (mRequestRelease) return;
            mShard_context = shared_context;
            mTexId = tex_id;
            mSurface = surface;
            mIsRecordable = isRecordable;
            mRequestSetEglContext = true;
            // reset both halves of the matrix pair to identity
            Matrix.setIdentityM(mMatrix, 0);
            Matrix.setIdentityM(mMatrix, 16);
            mSync.notifyAll();
            try {
                mSync.wait();
            } catch (final InterruptedException e) {
            }
        }
    }
    /** Draws with the current texture id and matrices. */
    public final void draw() {
        draw(mTexId, mMatrix, null);
    }
    /** Draws the given texture with the current matrices. */
    public final void draw(final int tex_id) {
        draw(tex_id, mMatrix, null);
    }
    /** Draws with a new texture matrix; MVP is reset to identity. */
    public final void draw(final float[] tex_matrix) {
        draw(mTexId, tex_matrix, null);
    }
    /** Draws with new texture and MVP matrices. */
    public final void draw(final float[] tex_matrix, final float[] mvp_matrix) {
        draw(mTexId, tex_matrix, mvp_matrix);
    }
    /** Draws the given texture with a new texture matrix; MVP reset to identity. */
    public final void draw(final int tex_id, final float[] tex_matrix) {
        draw(tex_id, tex_matrix, null);
    }
    /**
     * Queues one asynchronous draw request. A null or short matrix argument
     * resets the corresponding half of mMatrix to identity.
     */
    public final void draw(final int tex_id, final float[] tex_matrix, final float[] mvp_matrix) {
        synchronized (mSync) {
            if (mRequestRelease) return;
            mTexId = tex_id;
            if ((tex_matrix != null) && (tex_matrix.length >= 16)) {
                System.arraycopy(tex_matrix, 0, mMatrix, 0, 16);
            } else {
                Matrix.setIdentityM(mMatrix, 0);
            }
            if ((mvp_matrix != null) && (mvp_matrix.length >= 16)) {
                System.arraycopy(mvp_matrix, 0, mMatrix, 16, 16);
            } else {
                Matrix.setIdentityM(mMatrix, 16);
            }
            mRequestDraw++;
            mSync.notifyAll();
/* try {
mSync.wait();
} catch (final InterruptedException e) {
} */
        }
    }
    /** True if the target is not a Surface, or is a Surface that is still valid. */
    public boolean isValid() {
        synchronized (mSync) {
            return !(mSurface instanceof Surface) || ((Surface)mSurface).isValid();
        }
    }
    /**
     * Requests shutdown of the render thread and blocks until it has released
     * its GL resources.
     */
    public final void release() {
        if (DEBUG) Log.i(TAG, "release:");
        synchronized (mSync) {
            if (mRequestRelease) return;
            mRequestRelease = true;
            mSync.notifyAll();
            try {
                mSync.wait();
            } catch (final InterruptedException e) {
            }
        }
    }
    //********************************************************************************
    //********************************************************************************
    // State below is owned by the render thread only.
    private EGLBase mEgl;
    private EGLBase.EglSurface mInputSurface;
    private GLDrawer2D mDrawer;
    /**
     * Render-thread main loop: waits for requests, (re)creates the EGL setup on
     * demand, and draws one frame per pending draw request until released.
     */
    @Override
    public final void run() {
        if (DEBUG) Log.i(TAG, "RenderHandler thread started:");
        synchronized (mSync) {
            mRequestSetEglContext = mRequestRelease = false;
            mRequestDraw = 0;
            mSync.notifyAll(); // unblock createHandler()
        }
        boolean localRequestDraw;
        for (;;) {
            synchronized (mSync) {
                if (mRequestRelease) break;
                if (mRequestSetEglContext) {
                    mRequestSetEglContext = false;
                    internalPrepare();
                }
                localRequestDraw = mRequestDraw > 0;
                if (localRequestDraw) {
                    mRequestDraw--;
// mSync.notifyAll();
                }
            }
            if (localRequestDraw) {
                if ((mEgl != null) && mTexId >= 0) {
                    mInputSurface.makeCurrent();
                    // clear screen with yellow color so that you can see rendering rectangle
                    GLES20.glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
                    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
                    mDrawer.setMatrix(mMatrix, 16); // second half = MVP
                    mDrawer.draw(mTexId, mMatrix);  // first half = texture matrix
                    mInputSurface.swap();
                }
            } else {
                // nothing to do: sleep until the next request
                synchronized(mSync) {
                    try {
                        mSync.wait();
                    } catch (final InterruptedException e) {
                        break;
                    }
                }
            }
        }
        synchronized (mSync) {
            mRequestRelease = true;
            internalRelease();
            mSync.notifyAll(); // unblock release()
        }
        if (DEBUG) Log.i(TAG, "RenderHandler thread finished:");
    }
    // Builds the EGL context/surface/drawer; called on the render thread while holding mSync.
    private final void internalPrepare() {
        if (DEBUG) Log.i(TAG, "internalPrepare:");
        internalRelease();
        mEgl = new EGLBase(mShard_context, false, mIsRecordable);
        mInputSurface = mEgl.createFromSurface(mSurface);
        mInputSurface.makeCurrent();
        mDrawer = new GLDrawer2D();
        mSurface = null;
        mSync.notifyAll(); // unblock setEglContext()
    }
    // Tears down the drawer, surface and EGL context in that order.
    private final void internalRelease() {
        if (DEBUG) Log.i(TAG, "internalRelease:");
        if (mInputSurface != null) {
            mInputSurface.release();
            mInputSurface = null;
        }
        if (mDrawer != null) {
            mDrawer.release();
            mDrawer = null;
        }
        if (mEgl != null) {
            mEgl.release();
            mEgl = null;
        }
    }
}

View File

@@ -0,0 +1,31 @@
package me.lake.librestreaming.filter.hardvideofilter;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import me.lake.librestreaming.core.GLHelper;
/**
 * Base class for GPU (hard) video filters. Subclasses override the lifecycle
 * hooks; the base implementation only caches sizes and the shared index buffer.
 */
public class BaseHardVideoFilter {
    protected int SIZE_WIDTH;   // frame width in pixels, set in onInit()
    protected int SIZE_HEIGHT;  // frame height in pixels, set in onInit()
    protected int directionFlag = -1; // current orientation flag; -1 until first update
    protected ShortBuffer drawIndecesBuffer; // shared triangle index buffer

    /**
     * Called on the GL thread before first use; caches the frame size and the
     * shared draw-index buffer.
     */
    public void onInit(int VWidth, int VHeight) {
        this.SIZE_WIDTH = VWidth;
        this.SIZE_HEIGHT = VHeight;
        this.drawIndecesBuffer = GLHelper.getDrawIndecesBuffer();
    }

    /**
     * Renders cameraTexture into targetFrameBuffer. The base implementation
     * draws nothing; subclasses override to do the actual GL work.
     */
    public void onDraw(final int cameraTexture, final int targetFrameBuffer, final FloatBuffer shapeBuffer, final FloatBuffer textrueBuffer) {
    }

    /** Releases GL resources; the base implementation holds none. */
    public void onDestroy() {
    }

    /** Stores the new orientation flag for subsequent draws. */
    public void onDirectionUpdate(int _directionFlag) {
        this.directionFlag = _directionFlag;
    }
}

View File

@@ -0,0 +1,92 @@
package me.lake.librestreaming.filter.hardvideofilter;
import android.opengl.GLES20;
import java.nio.FloatBuffer;
import java.util.LinkedList;
import java.util.List;
import me.lake.librestreaming.tools.GLESTools;
/**
 * Composite GPU filter that chains several {@link BaseHardVideoFilter}s.
 * Each intermediate filter renders into its own framebuffer; the last filter
 * renders into the caller-supplied target framebuffer.
 */
public class HardVideoGroupFilter extends BaseHardVideoFilter {
    // Chain of wrapped filters, applied in list order.
    private final LinkedList<FilterWrapper> filterWrappers;

    /**
     * Creates a group filter applying the given filters in order.
     *
     * @param filters non-empty list of filters to chain
     * @throws IllegalArgumentException if filters is null or empty
     */
    public HardVideoGroupFilter(List<BaseHardVideoFilter> filters) {
        if (filters == null || filters.isEmpty()) {
            throw new IllegalArgumentException("can not create empty GroupFilter");
        }
        filterWrappers = new LinkedList<FilterWrapper>();
        for (BaseHardVideoFilter filter : filters) {
            filterWrappers.add(new FilterWrapper(filter));
        }
    }

    /**
     * Initializes every child filter and allocates one framebuffer + texture
     * pair per filter for intermediate rendering.
     */
    @Override
    public void onInit(int VWidth, int VHeight) {
        super.onInit(VWidth, VHeight);
        // (fixed: removed an unused loop counter that was incremented but never read)
        for (FilterWrapper wrapper : filterWrappers) {
            wrapper.filter.onInit(VWidth, VHeight);
            int[] frameBuffer = new int[1];
            int[] frameBufferTexture = new int[1];
            GLESTools.createFrameBuff(frameBuffer,
                    frameBufferTexture,
                    SIZE_WIDTH,
                    SIZE_HEIGHT);
            wrapper.frameBuffer = frameBuffer[0];
            wrapper.frameBufferTexture = frameBufferTexture[0];
        }
    }

    /**
     * Draws through the chain: each filter reads the previous filter's output
     * texture (the camera texture for the first) and the last filter renders
     * into targetFrameBuffer.
     */
    @Override
    public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textrueBuffer) {
        FilterWrapper preFilterWrapper = null;
        int i = 0;
        for (FilterWrapper wrapper : filterWrappers) {
            final int texture = (preFilterWrapper == null)
                    ? cameraTexture
                    : preFilterWrapper.frameBufferTexture;
            final boolean isLast = (i == filterWrappers.size() - 1);
            // Intermediate filters target their own FBO; the last one targets the caller's.
            wrapper.filter.onDraw(texture,
                    isLast ? targetFrameBuffer : wrapper.frameBuffer,
                    shapeBuffer, textrueBuffer);
            preFilterWrapper = wrapper;
            i++;
        }
    }

    /** Destroys every child filter and releases its GL framebuffer and texture. */
    @Override
    public void onDestroy() {
        super.onDestroy();
        for (FilterWrapper wrapper : filterWrappers) {
            wrapper.filter.onDestroy();
            GLES20.glDeleteFramebuffers(1, new int[]{wrapper.frameBuffer}, 0);
            GLES20.glDeleteTextures(1, new int[]{wrapper.frameBufferTexture}, 0);
        }
    }

    /** Propagates orientation changes to every child filter. */
    @Override
    public void onDirectionUpdate(int _directionFlag) {
        super.onDirectionUpdate(_directionFlag);
        for (FilterWrapper wrapper : filterWrappers) {
            wrapper.filter.onDirectionUpdate(_directionFlag);
        }
    }

    /**
     * Pairs a filter with its intermediate render target.
     * Made static: it never uses the enclosing instance, so a non-static inner
     * class would only hold a needless reference to the group filter.
     */
    private static class FilterWrapper {
        final BaseHardVideoFilter filter;
        int frameBuffer;
        int frameBufferTexture;

        FilterWrapper(BaseHardVideoFilter filter) {
            this.filter = filter;
        }
    }
}

View File

@@ -0,0 +1,97 @@
package me.lake.librestreaming.filter.hardvideofilter;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import java.nio.FloatBuffer;
import me.lake.librestreaming.tools.GLESTools;
/**
 * Pass-through GPU filter with overridable shaders. Subclasses may supply
 * custom GLSL via the constructor and hook extra GL state through
 * {@link #onPreDraw()} / {@link #onAfterDraw()}.
 */
public class OriginalHardVideoFilter extends BaseHardVideoFilter {
    protected int glProgram;            // linked shader program handle
    protected int glTextureLoc;         // uniform: uCamTexture sampler
    protected int glCamPostionLoc;      // attribute: aCamPosition
    protected int glCamTextureCoordLoc; // attribute: aCamTextureCoord
    // Default vertex shader: straight pass-through of position and UV.
    protected String vertexShader_filter = "" +
            "attribute vec4 aCamPosition;\n" +
            "attribute vec2 aCamTextureCoord;\n" +
            "varying vec2 vCamTextureCoord;\n" +
            "void main(){\n" +
            "    gl_Position= aCamPosition;\n" +
            "    vCamTextureCoord = aCamTextureCoord;\n" +
            "}";
    // Default fragment shader: sample the input texture unchanged.
    protected String fragmentshader_filter = "" +
            "precision highp float;\n" +
            "varying highp vec2 vCamTextureCoord;\n" +
            "uniform sampler2D uCamTexture;\n" +
            "void main(){\n" +
            "    vec4 color = texture2D(uCamTexture, vCamTextureCoord);\n" +
            "    gl_FragColor = color;\n" +
            "}";
    /**
     * @param vertexShaderCode   replacement vertex shader, or null to keep the default
     * @param fragmentShaderCode replacement fragment shader, or null to keep the default
     */
    public OriginalHardVideoFilter(String vertexShaderCode, String fragmentShaderCode) {
        if (vertexShaderCode != null) {
            vertexShader_filter = vertexShaderCode;
        }
        if (fragmentShaderCode != null) {
            fragmentshader_filter = fragmentShaderCode;
        }
    }
    /**
     * Compiles/links the program and caches attribute/uniform locations.
     * Must run on the GL thread.
     */
    @Override
    public void onInit(int VWidth, int VHeight) {
        super.onInit(VWidth, VHeight);
        glProgram = GLESTools.createProgram(vertexShader_filter, fragmentshader_filter);
        GLES20.glUseProgram(glProgram);
        glTextureLoc = GLES20.glGetUniformLocation(glProgram, "uCamTexture");
        glCamPostionLoc = GLES20.glGetAttribLocation(glProgram, "aCamPosition");
        glCamTextureCoordLoc = GLES20.glGetAttribLocation(glProgram, "aCamTextureCoord");
    }
    /**
     * Renders cameraTexture through the shader program into targetFrameBuffer,
     * restoring framebuffer/program/attribute state before returning.
     */
    @Override
    public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textrueBuffer) {
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer);
        GLES20.glUseProgram(glProgram);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, cameraTexture);
        GLES20.glUniform1i(glTextureLoc, 0); // sampler reads texture unit 0
        GLES20.glEnableVertexAttribArray(glCamPostionLoc);
        GLES20.glEnableVertexAttribArray(glCamTextureCoordLoc);
        shapeBuffer.position(0);
        GLES20.glVertexAttribPointer(glCamPostionLoc, 2,
                GLES20.GL_FLOAT, false,
                2 * 4, shapeBuffer);
        textrueBuffer.position(0);
        GLES20.glVertexAttribPointer(glCamTextureCoordLoc, 2,
                GLES20.GL_FLOAT, false,
                2 * 4, textrueBuffer);
        onPreDraw(); // subclass hook: extra uniforms/state before drawing
        GLES20.glViewport(0, 0, SIZE_WIDTH, SIZE_HEIGHT);
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer);
        // NOTE(review): glFinish() stalls the GPU pipeline every frame; presumably
        // here to order filters in the chain — confirm before removing.
        GLES20.glFinish();
        onAfterDraw(); // subclass hook: cleanup after drawing
        GLES20.glDisableVertexAttribArray(glCamPostionLoc);
        GLES20.glDisableVertexAttribArray(glCamTextureCoordLoc);
        // NOTE(review): unbinds the external OES target although GL_TEXTURE_2D was bound above — confirm intent.
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        GLES20.glUseProgram(0);
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
    }
    // Hook invoked just before glDrawElements; default does nothing.
    protected void onPreDraw() {
    }
    // Hook invoked just after glDrawElements; default does nothing.
    protected void onAfterDraw() {
    }
    /** Deletes the shader program. Must run on the GL thread. */
    @Override
    public void onDestroy() {
        super.onDestroy();
        GLES20.glDeleteProgram(glProgram);
    }
}

View File

@@ -0,0 +1,28 @@
package me.lake.librestreaming.filter.softaudiofilter;
/**
 * Base class for CPU (soft) audio filters. The base implementation only
 * caches buffer sizes and passes audio through unchanged.
 */
public class BaseSoftAudioFilter {
    protected int SIZE;      // audio buffer size in bytes, set in onInit()
    protected int SIZE_HALF; // half the buffer size (integer division)

    /**
     * Caches the working buffer size before the filter is first used.
     *
     * @param size audio buffer size in bytes
     */
    public void onInit(int size) {
        this.SIZE = size;
        this.SIZE_HALF = size / 2;
    }

    /**
     * Processes one audio buffer.
     *
     * @param orignBuff          input samples
     * @param targetBuff         output buffer the filter may write into
     * @param presentationTimeMs presentation timestamp in milliseconds
     * @param sequenceNum        running frame counter
     * @return false to use orignBuff, true to use targetBuff
     */
    public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
        // Default is a pass-through: keep the original buffer.
        return false;
    }

    /** Releases filter resources; the base implementation holds none. */
    public void onDestroy() {
    }
}

View File

@@ -0,0 +1,36 @@
package me.lake.librestreaming.filter.softvideofilter;
/**
 * Base class for soft-mode video filters operating on raw NV21 frames.
 * Subclasses override {@link #onFrame} and may read the cached plane sizes.
 */
public class BaseSoftVideoFilter {
    /** Frame width in pixels; valid after {@link #onInit}. */
    protected int SIZE_WIDTH;
    /** Frame height in pixels; valid after {@link #onInit}. */
    protected int SIZE_HEIGHT;
    /** Byte length of the Y (luma) plane: width * height. */
    protected int SIZE_Y;
    /** Total byte length of one frame: Y plane * 3 / 2. */
    protected int SIZE_TOTAL;
    /** Byte length of a single chroma plane. */
    protected int SIZE_U;
    /** Byte length of the combined chroma area. */
    protected int SIZE_UV;

    /**
     * Called once before filtering starts to cache frame geometry.
     *
     * @param VWidth  frame width in pixels
     * @param VHeight frame height in pixels
     */
    public void onInit(int VWidth, int VHeight) {
        this.SIZE_WIDTH = VWidth;
        this.SIZE_HEIGHT = VHeight;
        this.SIZE_Y = VWidth * VHeight;
        this.SIZE_UV = VWidth * VHeight / 2;
        this.SIZE_U = this.SIZE_UV / 2;
        this.SIZE_TOTAL = this.SIZE_Y * 3 / 2;
    }

    /**
     * Processes one video frame.
     *
     * @param orignBuff          source frame data
     * @param targetBuff         destination frame data
     * @param presentationTimeMs frame timestamp in milliseconds
     * @param sequenceNum        running frame counter
     * @return false to use orignBuff, true to use targetBuff
     */
    public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
        return false;
    }

    /** Releases any resources held by the filter; default does nothing. */
    public void onDestroy() {
    }
}

View File

@@ -0,0 +1,19 @@
package me.lake.librestreaming.model;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
/**
 * Plain struct of EGL handles plus shader program/attribute locations.
 * Named after MediaCodec — presumably holds the GL state for drawing into
 * the encoder's input surface; confirm at usage sites.
 * (Note: "Wapper" spelling kept — the class name is public API.)
 */
public class MediaCodecGLWapper {
    public EGLDisplay eglDisplay;
    public EGLConfig eglConfig;
    public EGLSurface eglSurface;
    public EGLContext eglContext;
    // GL program handle and its uniform/attribute locations.
    public int drawProgram;
    public int drawTextureLoc;
    public int drawPostionLoc;
    public int drawTextureCoordLoc;
}

View File

@@ -0,0 +1,25 @@
package me.lake.librestreaming.model;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
/**
 * Plain struct of EGL handles and shader locations for the off-screen pass.
 * Carries two programs: "cam2d" (which also has a texture-matrix uniform)
 * and "cam". Exact roles are defined where the programs are compiled.
 */
public class OffScreenGLWapper {
    public EGLDisplay eglDisplay;
    public EGLConfig eglConfig;
    public EGLSurface eglSurface;
    public EGLContext eglContext;
    // Locations for the cam2d program (includes a texture transform matrix uniform).
    public int cam2dProgram;
    public int cam2dTextureMatrix;
    public int cam2dTextureLoc;
    public int cam2dPostionLoc;
    public int cam2dTextureCoordLoc;
    // Locations for the cam program.
    public int camProgram;
    public int camTextureLoc;
    public int camPostionLoc;
    public int camTextureCoordLoc;
}

View File

@@ -0,0 +1,14 @@
package me.lake.librestreaming.model;
/**
 * Reusable audio buffer slot for the soft-mode audio pipeline.
 */
public class RESAudioBuff {
    /** True while the producer may (re)fill {@link #buff}. */
    public boolean isReadyToFill;
    /** Audio format id of the PCM data; -1 until assigned. Presumably an
     *  android AudioFormat encoding constant — confirm at call sites. */
    public int audioFormat = -1;
    /** Backing PCM storage, allocated once at construction. */
    public byte[] buff;

    /**
     * @param audioFormat format id for the data this slot will hold
     * @param size        capacity of the backing array in bytes
     */
    public RESAudioBuff(int audioFormat, int size) {
        this.isReadyToFill = true;
        this.audioFormat = audioFormat;
        this.buff = new byte[size];
    }
}

View File

@@ -0,0 +1,212 @@
package me.lake.librestreaming.model;
import android.hardware.Camera;
/**
 * Builder-style configuration for one streaming session. Obtain a fully
 * defaulted instance via {@link #obtain()} and override individual settings
 * with the setters before the client's prepare step.
 */
public class RESConfig {
    /** Filter pipeline selector; mirrors RESCoreParameters.FILTER_MODE_*. */
    public static class FilterMode {
        public static final int HARD = RESCoreParameters.FILTER_MODE_HARD;
        public static final int SOFT = RESCoreParameters.FILTER_MODE_SOFT;
    }
    /** Soft-mode preview renderer selector; mirrors RESCoreParameters.RENDERING_MODE_*. */
    public static class RenderingMode {
        public static final int NativeWindow = RESCoreParameters.RENDERING_MODE_NATIVE_WINDOW;
        public static final int OpenGLES = RESCoreParameters.RENDERING_MODE_OPENGLES;
    }
    /** Camera rotation/flip flags; mirrors RESCoreParameters.FLAG_DIRECTION_*. */
    public static class DirectionMode {
        public static final int FLAG_DIRECTION_FLIP_HORIZONTAL = RESCoreParameters.FLAG_DIRECTION_FLIP_HORIZONTAL;
        public static final int FLAG_DIRECTION_FLIP_VERTICAL = RESCoreParameters.FLAG_DIRECTION_FLIP_VERTICAL;
        public static final int FLAG_DIRECTION_ROATATION_0 = RESCoreParameters.FLAG_DIRECTION_ROATATION_0;
        public static final int FLAG_DIRECTION_ROATATION_90 = RESCoreParameters.FLAG_DIRECTION_ROATATION_90;
        public static final int FLAG_DIRECTION_ROATATION_180 = RESCoreParameters.FLAG_DIRECTION_ROATATION_180;
        public static final int FLAG_DIRECTION_ROATATION_270 = RESCoreParameters.FLAG_DIRECTION_ROATATION_270;
    }
    private int filterMode;
    private Size targetVideoSize;
    private int videoBufferQueueNum;
    private int bitRate;
    private String rtmpAddr;
    private int renderingMode;
    private int defaultCamera;
    private int frontCameraDirectionMode;
    private int backCameraDirectionMode;
    private int videoFPS;
    private int videoGOP;
    private boolean printDetailMsg;
    private Size targetPreviewSize;
    // Instances must be created through obtain() so defaults are populated.
    private RESConfig() {
    }
    /**
     * Factory with defaults: soft filter, native-window rendering, 1080p @ 60
     * fps, GOP 1, 10 Mbps, back camera, no rotation/flip.
     */
    public static RESConfig obtain() {
        RESConfig res = new RESConfig();
        res.setFilterMode(FilterMode.SOFT);
        res.setRenderingMode(RenderingMode.NativeWindow);
        res.setTargetVideoSize(new Size(1920, 1080));
        res.setVideoFPS(60);
        res.setVideoGOP(1);
        res.setVideoBufferQueueNum(1);
        res.setBitRate(10*1024*1024);
        res.setPrintDetailMsg(false);
        res.setDefaultCamera(Camera.CameraInfo.CAMERA_FACING_BACK);
        res.setBackCameraDirectionMode(DirectionMode.FLAG_DIRECTION_ROATATION_0);
        res.setFrontCameraDirectionMode(DirectionMode.FLAG_DIRECTION_ROATATION_0);
        return res;
    }
    /**
     * set the filter mode.
     *
     * @param filterMode {@link FilterMode}
     */
    public void setFilterMode(int filterMode) {
        this.filterMode = filterMode;
    }
    /**
     * set the default camera to start stream
     */
    public void setDefaultCamera(int defaultCamera) {
        this.defaultCamera = defaultCamera;
    }
    /**
     * set front camera rotation &amp; flip
     *
     * @param frontCameraDirectionMode {@link DirectionMode}
     */
    public void setFrontCameraDirectionMode(int frontCameraDirectionMode) {
        this.frontCameraDirectionMode = frontCameraDirectionMode;
    }
    /**
     * set back camera rotation &amp; flip
     *
     * @param backCameraDirectionMode {@link DirectionMode}
     */
    public void setBackCameraDirectionMode(int backCameraDirectionMode) {
        this.backCameraDirectionMode = backCameraDirectionMode;
    }
    /**
     * set renderingMode when using soft mode<br/>
     * no use for hard mode
     *
     * @param renderingMode {@link RenderingMode}
     */
    public void setRenderingMode(int renderingMode) {
        this.renderingMode = renderingMode;
    }
    /**
     * no use for now
     *
     * @param printDetailMsg
     */
    public void setPrintDetailMsg(boolean printDetailMsg) {
        this.printDetailMsg = printDetailMsg;
    }
    /**
     * set the target video size.<br/>
     * real video size may different from it. Depends on device.
     *
     * @param videoSize
     */
    public void setTargetVideoSize(Size videoSize) {
        targetVideoSize = videoSize;
    }
    /**
     * set video buffer number for soft mode.<br/>
     * larger num: smoother video, more memory.
     *
     * @param num
     */
    public void setVideoBufferQueueNum(int num) {
        videoBufferQueueNum = num;
    }
    /**
     * set video bitrate (bits per second)
     *
     * @param bitRate
     */
    public void setBitRate(int bitRate) {
        this.bitRate = bitRate;
    }
    public int getVideoFPS() {
        return videoFPS;
    }
    public void setVideoFPS(int videoFPS) {
        this.videoFPS = videoFPS;
    }
    public int getVideoGOP(){
        return videoGOP;
    }
    public void setVideoGOP(int videoGOP){
        this.videoGOP = videoGOP;
    }
    public int getVideoBufferQueueNum() {
        return videoBufferQueueNum;
    }
    public int getBitRate() {
        return bitRate;
    }
    public Size getTargetVideoSize() {
        return targetVideoSize;
    }
    public int getFilterMode() {
        return filterMode;
    }
    public int getDefaultCamera() {
        return defaultCamera;
    }
    public int getBackCameraDirectionMode() {
        return backCameraDirectionMode;
    }
    public int getFrontCameraDirectionMode() {
        return frontCameraDirectionMode;
    }
    public int getRenderingMode() {
        return renderingMode;
    }
    public String getRtmpAddr() {
        return rtmpAddr;
    }
    public void setRtmpAddr(String rtmpAddr) {
        this.rtmpAddr = rtmpAddr;
    }
    public boolean isPrintDetailMsg() {
        return printDetailMsg;
    }
    public void setTargetPreviewSize(Size previewSize) {
        targetPreviewSize = previewSize;
    }
    public Size getTargetPreviewSize() {
        return targetPreviewSize;
    }
}

View File

@@ -0,0 +1,119 @@
package me.lake.librestreaming.model;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import me.lake.librestreaming.tools.LogTools;
import me.lake.librestreaming.ws.StreamAVOption;
/**
 * Mutable bag of every parameter the streaming core negotiates. Some values
 * are seeded from StreamAVOption in the constructor; -1 marks values that
 * are decided later during preparation.
 */
public class RESCoreParameters {
    public static final int FILTER_MODE_HARD = 1;
    public static final int FILTER_MODE_SOFT = 2;
    public static final int RENDERING_MODE_NATIVE_WINDOW = 1;
    public static final int RENDERING_MODE_OPENGLES = 2;
    /**
     * same with jni — these flag values must stay in sync with the native side.
     */
    public static final int FLAG_DIRECTION_FLIP_HORIZONTAL = 0x01;
    public static final int FLAG_DIRECTION_FLIP_VERTICAL = 0x02;
    public static final int FLAG_DIRECTION_ROATATION_0 = 0x10;
    public static final int FLAG_DIRECTION_ROATATION_90 = 0x20;
    public static final int FLAG_DIRECTION_ROATATION_180 = 0x40;
    public static final int FLAG_DIRECTION_ROATATION_270 = 0x80;
    public boolean done;
    public boolean printDetailMsg;
    public int filterMode;
    public int renderingMode;
    public String rtmpAddr;
    public int frontCameraDirectionMode;
    public int backCameraDirectionMode;
    public boolean isPortrait;
    // Video geometry and timing.
    public int previewVideoWidth;
    public int previewVideoHeight;
    public int videoWidth;
    public int videoHeight;
    public int videoFPS;
    public int videoGOP;
    public float cropRatio;
    public int previewColorFormat;
    public int previewBufferSize;
    // MediaCodec AVC (H.264) encoder settings.
    public int mediacodecAVCColorFormat;
    public int mediacdoecAVCBitRate;
    public int videoBufferQueueNum;
    public int audioBufferQueueNum;
    // Audio capture settings.
    public int audioRecoderFormat;
    public int audioRecoderSampleRate;
    public int audioRecoderChannelConfig;
    public int audioRecoderSliceSize;
    public int audioRecoderSource;
    public int audioRecoderBufferSize;
    public int previewMaxFps;
    public int previewMinFps;
    public int mediacodecAVCFrameRate;
    public int mediacodecAVCIFrameInterval;
    public int mediacodecAVCProfile;
    public int mediacodecAVClevel;
    // MediaCodec AAC encoder settings.
    public int mediacodecAACProfile;
    public int mediacodecAACSampleRate;
    public int mediacodecAACChannelCount;
    public int mediacodecAACBitRate;
    public int mediacodecAACMaxInputSize;
    //sender
    public int senderQueueLength;
    public RESCoreParameters() {
        // Seed video geometry/rate from StreamAVOption defaults; everything
        // else stays -1 until the pipeline fills it in.
        done = false;
        printDetailMsg = false;
        filterMode=-1;
        videoWidth = StreamAVOption.videoWidth;
        videoHeight = StreamAVOption.videoHeight;
        previewVideoWidth = StreamAVOption.previewWidth;
        previewVideoHeight = StreamAVOption.previewHeight;
        videoFPS=StreamAVOption.videoFramerate;
        videoGOP=StreamAVOption.videoGOP;
        previewColorFormat = -1;
        mediacodecAVCColorFormat = -1;
        mediacdoecAVCBitRate = StreamAVOption.videoBitrate;
        videoBufferQueueNum = -1;
        audioBufferQueueNum = -1;
        mediacodecAVCFrameRate = -1;
        mediacodecAVCIFrameInterval = -1;
        mediacodecAVCProfile = -1;
        mediacodecAVClevel = -1;
        mediacodecAACProfile = -1;
        mediacodecAACSampleRate = -1;
        mediacodecAACChannelCount = -1;
        mediacodecAACBitRate = -1;
        mediacodecAACMaxInputSize = -1;
    }
    /** Writes the full parameter dump to the error log via LogTools. */
    public void dump() {
        LogTools.e(this.toString());
    }
    /**
     * Reflectively renders every non-static field as "name=value;" so new
     * fields show up in dumps without manual maintenance.
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("ResParameter:");
        Field[] fields = this.getClass().getDeclaredFields();
        for (Field field : fields) {
            if (Modifier.isStatic(field.getModifiers())) {
                continue;
            }
            field.setAccessible(true);
            try {
                sb.append(field.getName());
                sb.append('=');
                sb.append(field.get(this));
                sb.append(';');
            } catch (IllegalAccessException e) {
                // Best-effort dump: setAccessible(true) was called above, so
                // this is not expected; skip the field rather than fail.
            }
        }
        return sb.toString();
    }
}

View File

@@ -0,0 +1,18 @@
package me.lake.librestreaming.model;
import java.util.Arrays;
/**
 * Reusable video frame slot for the soft-mode video pipeline.
 */
public class RESVideoBuff {
    /** True while the producer may (re)fill {@link #buff}. */
    public boolean isReadyToFill;
    /** Pixel format id of the frame data; -1 until assigned. */
    public int colorFormat = -1;
    /** Backing frame storage, allocated once at construction. */
    public byte[] buff;

    /**
     * @param colorFormat pixel format id for the frames this slot will hold
     * @param size        frame size in bytes
     */
    public RESVideoBuff(int colorFormat, int size) {
        this.isReadyToFill = true;
        this.colorFormat = colorFormat;
        this.buff = new byte[size];
        // Pre-fill the second half of the buffer with 0x7F (neutral chroma)
        // so an unwritten frame renders grey instead of green.
        int chromaStart = size / 2;
        Arrays.fill(this.buff, chromaStart, size, (byte) 127);
    }
}

View File

@@ -0,0 +1,19 @@
package me.lake.librestreaming.model;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
/**
 * Plain struct of EGL handles plus draw program/attribute locations for the
 * on-screen preview pass. Field layout intentionally mirrors
 * MediaCodecGLWapper.
 */
public class ScreenGLWapper {
    public EGLDisplay eglDisplay;
    public EGLConfig eglConfig;
    public EGLSurface eglSurface;
    public EGLContext eglContext;
    // GL program handle and its uniform/attribute locations.
    public int drawProgram;
    public int drawTextureLoc;
    public int drawPostionLoc;
    public int drawTextureCoordLoc;
}

View File

@@ -0,0 +1,86 @@
package me.lake.librestreaming.model;
public final class Size {
/**
* Create a new immutable Size instance.
*
* @param width The width of the size, in pixels
* @param height The height of the size, in pixels
*/
public Size(int width, int height) {
mWidth = width;
mHeight = height;
}
/**
* Get the width of the size (in pixels).
*
* @return width
*/
public int getWidth() {
return mWidth;
}
/**
* Get the height of the size (in pixels).
*
* @return height
*/
public int getHeight() {
return mHeight;
}
/**
* Check if this size is equal to another size.
* <p>
* Two sizes are equal if and only if both their widths and heights are
* equal.
* </p>
* <p>
* A size object is never equal to any other type of object.
* </p>
*
* @return {@code true} if the objects were equal, {@code false} otherwise
*/
@Override
public boolean equals(final Object obj) {
if (obj == null) {
return false;
}
if (this == obj) {
return true;
}
if (obj instanceof Size) {
Size other = (Size) obj;
return mWidth == other.mWidth && mHeight == other.mHeight;
}
return false;
}
/**
* Return the size represented as a string with the format {@code "WxH"}
*
* @return string representation of the size
*/
@Override
public String toString() {
return mWidth + "x" + mHeight;
}
private static NumberFormatException invalidSize(String s) {
throw new NumberFormatException("Invalid Size: \"" + s + "\"");
}
/**
* {@inheritDoc}
*/
@Override
public int hashCode() {
// assuming most sizes are <2^16, doing a rotate will give us perfect hashing
return mHeight ^ ((mWidth << (Integer.SIZE / 2)) | (mWidth >>> (Integer.SIZE / 2)));
}
private final int mWidth;
private final int mHeight;
}

View File

@@ -0,0 +1,378 @@
package me.lake.librestreaming.render;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;
import java.util.Arrays;
import javax.microedition.khronos.egl.EGL10;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.egl.EGLContext;
import javax.microedition.khronos.egl.EGLDisplay;
import javax.microedition.khronos.egl.EGLSurface;
import me.lake.librestreaming.tools.GLESTools;
/**
 * {@link IRender} implementation that uploads NV21 preview frames as three
 * Y/U/V single-channel textures and converts them to RGB in a fragment
 * shader on a dedicated GL thread.
 * <p>
 * Thread model: the public IRender methods run on the caller's thread and
 * hand work to {@link GLESRenderThread}, which owns the EGL context and does
 * all drawing.
 */
public class GLESRender implements IRender {
    // Guards creation, access and teardown of the render thread.
    private final Object syncRenderThread = new Object();
    GLESRenderThread glesRenderThread;

    @Override
    public void create(SurfaceTexture visualSurfaceTexture, int pixelFormat, int pixelWidth, int pixelHeight, int visualWidth, int visualHeight) {
        if (pixelFormat != ImageFormat.NV21) {
            throw new IllegalArgumentException("GLESRender,pixelFormat only support NV21");
        }
        synchronized (syncRenderThread) {
            glesRenderThread = new GLESRenderThread(visualSurfaceTexture,
                    pixelFormat,
                    pixelWidth,
                    pixelHeight,
                    visualWidth,
                    visualHeight);
            glesRenderThread.start();
        }
    }

    @Override
    public void update(int visualWidth, int visualHeight) {
        synchronized (syncRenderThread) {
            glesRenderThread.updateVisualWH(visualWidth, visualHeight);
        }
    }

    @Override
    public void rendering(byte[] pixel) {
        synchronized (syncRenderThread) {
            glesRenderThread.updatePixel(pixel);
        }
    }

    @Override
    public void destroy(boolean releaseTexture) {
        synchronized (syncRenderThread) {
            glesRenderThread.quit(releaseTexture);
            // Wait for the GL thread to release its EGL resources.
            try {
                glesRenderThread.join();
            } catch (InterruptedException ignored) {
            }
        }
    }

    /**
     * Dedicated thread owning the EGL context. Sleeps between frames and is
     * woken by updatePixel()/quit() via syncThread.
     */
    private static class GLESRenderThread extends Thread {
        int mPixelWidth;
        int mPixelHeight;
        int mySize;  // pixel count of the Y plane (width * height)
        int mVisualWidth;
        int mVisualHeight;
        // Staging arrays for the native NV21 -> planar Y/U/V split.
        byte[] yTemp, uTemp, vTemp;
        SurfaceTexture mVisualSurfaceTexture;
        // wait/notify handshake between producer and this render loop.
        private final Object syncThread = new Object();
        boolean quit = false;
        boolean releaseTexture=true;
        EGL10 mEgl;
        EGLDisplay mEglDisplay;
        EGLConfig mEglConfig;
        EGLSurface mEglSurface;
        EGLContext mEglContext;
        int mProgram;

        public GLESRenderThread(SurfaceTexture visualSurfaceTexture, int pixelFormat, int pixelWidth, int pixelHeight, int visualWidth, int visualHeight) {
            quit = false;
            mVisualSurfaceTexture = visualSurfaceTexture;
            mPixelWidth = pixelWidth;
            mPixelHeight = pixelHeight;
            mySize = mPixelWidth * mPixelHeight;
            mVisualWidth = visualWidth;
            mVisualHeight = visualHeight;
            // Direct buffers for texture uploads; each chroma plane is a
            // quarter of the Y plane size.
            yBuf = ByteBuffer.allocateDirect(mySize);
            uBuf = ByteBuffer.allocateDirect(mySize >> 2);
            vBuf = ByteBuffer.allocateDirect(mySize >> 2);
            yTemp = new byte[mySize];
            uTemp = new byte[mySize >> 2];
            vTemp = new byte[mySize >> 2];
            // 0x7F is neutral chroma, so a frame drawn before the first
            // updatePixel() renders grey rather than green.
            Arrays.fill(uTemp, (byte) 0x7F);
            Arrays.fill(vTemp, (byte) 0x7F);
            uBuf.position(0);
            uBuf.put(uTemp).position(0);
            vBuf.position(0);
            vBuf.put(vTemp).position(0);
        }

        /**
         * Requests the loop to exit and wakes it.
         *
         * @param releaseTexture whether run() should release the SurfaceTexture on exit
         */
        public void quit(boolean releaseTexture) {
            synchronized (syncThread) {
                this.releaseTexture = releaseTexture;
                quit = true;
                syncThread.notify();
            }
        }

        /**
         * Splits one NV21 frame into planar Y/U/V (native code), publishes it
         * into the direct buffers and wakes the render loop.
         */
        public void updatePixel(byte[] pixel) {
            synchronized (syncBuff) {
                NV21TOYUV(pixel, yTemp, uTemp, vTemp, mPixelWidth, mPixelHeight);
                yBuf.position(0);
                yBuf.put(yTemp).position(0);
                uBuf.position(0);
                uBuf.put(uTemp).position(0);
                vBuf.position(0);
                vBuf.put(vTemp).position(0);
            }
            synchronized (syncThread) {
                syncThread.notify();
            }
        }

        // NOTE(review): written without a lock and read by the render loop;
        // a torn width/height pair is possible for one frame.
        public void updateVisualWH(int visualWidth, int visualHeight) {
            mVisualWidth = visualWidth;
            mVisualHeight = visualHeight;
        }

        @Override
        public void run() {
            // All EGL/GL setup and drawing happens on this thread only.
            initGLES();
            mProgram = GLESTools.createProgram(vertexShaderCode, fragmentshaderCode);
            initVertex();
            initTexture();
            while (!quit) {
                drawFrame();
                if (!mEgl.eglSwapBuffers(mEglDisplay, mEglSurface)) {
                    throw new RuntimeException("eglSwapBuffers,failed!");
                }
                // Sleep until the next frame arrives (or quit is requested).
                synchronized (syncThread) {
                    try {
                        if(!quit) {
                            syncThread.wait();
                        }
                    } catch (InterruptedException ignored) {
                    }
                }
            }
            releaseGLES();
            if (releaseTexture) {
                mVisualSurfaceTexture.release();
            }
        }

        private void drawFrame() {
            GLES20.glViewport(0, 0, mVisualWidth, mVisualHeight);
            GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
            GLES20.glUseProgram(mProgram);
            // Upload the latest planes under syncBuff so updatePixel() cannot
            // swap buffer contents mid-upload.
            synchronized (syncBuff) {
                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yTexture[0]);
                GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0,
                        mPixelWidth,
                        mPixelHeight,
                        GLES20.GL_LUMINANCE,
                        GLES20.GL_UNSIGNED_BYTE,
                        yBuf);
                GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, uTexture[0]);
                GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0,
                        mPixelWidth >> 1,
                        mPixelHeight >> 1,
                        GLES20.GL_LUMINANCE,
                        GLES20.GL_UNSIGNED_BYTE,
                        uBuf);
                GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, vTexture[0]);
                GLES20.glTexSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0,
                        mPixelWidth >> 1,
                        mPixelHeight >> 1,
                        GLES20.GL_LUMINANCE,
                        GLES20.GL_UNSIGNED_BYTE,
                        vBuf);
            }
            //=================================
            GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndices.length, GLES20.GL_UNSIGNED_SHORT, mDrawIndicesBuffer);
            GLES20.glFinish();
        }

        /** Creates display, config, window surface and an ES 2.0 context. */
        private void initGLES() {
            mEgl = (EGL10) EGLContext.getEGL();
            mEglDisplay = mEgl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
            if (EGL10.EGL_NO_DISPLAY == mEglDisplay) {
                throw new RuntimeException("GLESRender,eglGetDisplay,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
            }
            int versions[] = new int[2];
            if (!mEgl.eglInitialize(mEglDisplay, versions)) {
                throw new RuntimeException("GLESRender,eglInitialize,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
            }
            int configsCount[] = new int[1];
            EGLConfig configs[] = new EGLConfig[1];
            // RGB888, no depth/stencil — plain 2D video blit.
            int configSpec[] = new int[]{
                    EGL10.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                    EGL10.EGL_RED_SIZE, 8,
                    EGL10.EGL_GREEN_SIZE, 8,
                    EGL10.EGL_BLUE_SIZE, 8,
                    EGL10.EGL_DEPTH_SIZE, 0,
                    EGL10.EGL_STENCIL_SIZE, 0,
                    EGL10.EGL_NONE
            };
            mEgl.eglChooseConfig(mEglDisplay, configSpec, configs, 1, configsCount);
            if (configsCount[0] <= 0) {
                throw new RuntimeException("GLESRender,eglChooseConfig,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
            }
            mEglConfig = configs[0];
            mEglSurface = mEgl.eglCreateWindowSurface(mEglDisplay, mEglConfig, mVisualSurfaceTexture, null);
            if (null == mEglSurface || EGL10.EGL_NO_SURFACE == mEglSurface) {
                throw new RuntimeException("GLESRender,eglCreateWindowSurface,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
            }
            int contextSpec[] = new int[]{
                    EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                    EGL14.EGL_NONE
            };
            mEglContext = mEgl.eglCreateContext(mEglDisplay, mEglConfig, EGL10.EGL_NO_CONTEXT, contextSpec);
            if (EGL10.EGL_NO_CONTEXT == mEglContext) {
                throw new RuntimeException("GLESRender,eglCreateContext,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
            }
            if (!mEgl.eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext)) {
                throw new RuntimeException("GLESRender,eglMakeCurrent,failed:" + GLUtils.getEGLErrorString(mEgl.eglGetError()));
            }
            GLES20.glDisable(GLES20.GL_DEPTH_TEST);
        }

        /** Allocates the quad's position, texcoord and index buffers. */
        private void initVertex() {
            mSquareVerticesBuffer = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * squareVertices.length).
                    order(ByteOrder.nativeOrder()).
                    asFloatBuffer();
            mSquareVerticesBuffer.put(squareVertices);
            mSquareVerticesBuffer.position(0);
            mTextureCoordsBuffer = ByteBuffer.allocateDirect(FLOAT_SIZE_BYTES * textureVertices.length).
                    order(ByteOrder.nativeOrder()).
                    asFloatBuffer();
            mTextureCoordsBuffer.put(textureVertices);
            mTextureCoordsBuffer.position(0);
            mDrawIndicesBuffer = ByteBuffer.allocateDirect(SHORT_SIZE_BYTES * drawIndices.length).
                    order(ByteOrder.nativeOrder()).
                    asShortBuffer();
            mDrawIndicesBuffer.put(drawIndices);
            mDrawIndicesBuffer.position(0);
        }

        /** Creates the Y/U/V textures and wires up program samplers/attributes. */
        private void initTexture() {
            GLES20.glEnable(GLES20.GL_TEXTURE_2D);
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
            GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
            GLES20.glActiveTexture(GLES20.GL_TEXTURE2);
            createTexture(mPixelWidth, mPixelHeight, GLES20.GL_LUMINANCE, yTexture);
            createTexture(mPixelWidth >> 1, mPixelHeight >> 1, GLES20.GL_LUMINANCE, uTexture);
            createTexture(mPixelWidth >> 1, mPixelHeight >> 1, GLES20.GL_LUMINANCE, vTexture);
            GLES20.glUseProgram(mProgram);
            sampleYLoaction = GLES20.glGetUniformLocation(mProgram, "samplerY");
            sampleULoaction = GLES20.glGetUniformLocation(mProgram, "samplerU");
            sampleVLoaction = GLES20.glGetUniformLocation(mProgram, "samplerV");
            // Bind samplers to texture units 0/1/2 — matches drawFrame().
            GLES20.glUniform1i(sampleYLoaction, 0);
            GLES20.glUniform1i(sampleULoaction, 1);
            GLES20.glUniform1i(sampleVLoaction, 2);
            int aPostionLocation = GLES20.glGetAttribLocation(mProgram, "aPosition");
            int aTextureCoordLocation = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
            GLES20.glEnableVertexAttribArray(aPostionLocation);
            GLES20.glVertexAttribPointer(aPostionLocation, SHAPE_COORD_PER_VERTEX,
                    GLES20.GL_FLOAT, false,
                    SHAPE_COORD_PER_VERTEX * 4, mSquareVerticesBuffer);
            GLES20.glEnableVertexAttribArray(aTextureCoordLocation);
            GLES20.glVertexAttribPointer(aTextureCoordLocation, TEXTURE_COORD_PER_VERTEX,
                    GLES20.GL_FLOAT, false,
                    TEXTURE_COORD_PER_VERTEX * 4, mTextureCoordsBuffer);
        }

        /** Allocates one clamped, single-channel texture of the given size. */
        private void createTexture(int width, int height, int format, int[] texture) {
            GLES20.glGenTextures(1, texture, 0);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture[0]);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, format, width, height, 0, format, GLES20.GL_UNSIGNED_BYTE, null);
        }

        /** Deletes GL objects and tears the EGL context/surface/display down. */
        private void releaseGLES() {
            GLES20.glDeleteProgram(mProgram);
            GLES20.glDeleteTextures(1, yTexture, 0);
            GLES20.glDeleteTextures(1, uTexture, 0);
            GLES20.glDeleteTextures(1, vTexture, 0);
            mEgl.eglMakeCurrent(mEglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
            mEgl.eglDestroySurface(mEglDisplay, mEglSurface);
            mEgl.eglDestroyContext(mEglDisplay, mEglContext);
            mEgl.eglTerminate(mEglDisplay);
        }

        //Pixel Buff — guarded by syncBuff (producer vs drawFrame upload).
        private final Object syncBuff = new Object();
        private ByteBuffer yBuf;
        private ByteBuffer uBuf;
        private ByteBuffer vBuf;
        //texture ids, one per plane
        private int[] yTexture = new int[1];
        private int[] uTexture = new int[1];
        private int[] vTexture = new int[1];
        private int sampleYLoaction;
        private int sampleULoaction;
        private int sampleVLoaction;
        //shape vertices: full-screen quad, CCW from bottom-left
        private FloatBuffer mSquareVerticesBuffer;
        private static float squareVertices[] = {
                -1.0f, -1.0f, 0.0f,
                1.0f, -1.0f, 0.0f,
                1.0f, 1.0f, 0.0f,
                -1.0f, 1.0f, 0.0f
        };
        //texture coordinate vertices (v flipped relative to positions)
        private FloatBuffer mTextureCoordsBuffer;
        private static float textureVertices[] = {
                0.0f, 1.0f,
                1.0f, 1.0f,
                1.0f, 0.0f,
                0.0f, 0.0f
        };
        //gl draw order: two triangles covering the quad
        private ShortBuffer mDrawIndicesBuffer;
        private static short drawIndices[] = {0, 1, 2, 0, 2, 3};
        private static int FLOAT_SIZE_BYTES = 4;
        private static int SHORT_SIZE_BYTES = 2;
        private static final int SHAPE_COORD_PER_VERTEX = 3;
        private static final int TEXTURE_COORD_PER_VERTEX = 2;
        // Pass-through vertex shader.
        private static String vertexShaderCode =
                "attribute vec4 aPosition;\n" +
                        "attribute vec2 aTextureCoord;\n" +
                        "varying vec2 vTextureCoord;\n" +
                        "void main(){\n" +
                        "    gl_Position= aPosition;\n" +
                        "    vTextureCoord = aTextureCoord;\n" +
                        "}";
        // Fragment shader: samples Y/U/V planes and applies a BT.601-style
        // YUV->RGB matrix (U/V re-centered around 0).
        private static String fragmentshaderCode =
                "varying lowp vec2 vTextureCoord;\n" +
                        "uniform sampler2D samplerY;\n" +
                        "uniform sampler2D samplerU;\n" +
                        "uniform sampler2D samplerV;\n" +
                        "const mediump mat3 yuv2rgb = mat3(1,1,1,0,-0.39465,2.03211,1.13983,-0.5806,0);\n" +
                        "void main(){\n" +
                        "    mediump vec3 yuv;\n" +
                        "    yuv.x = texture2D(samplerY,vTextureCoord).r;\n" +
                        "    yuv.y = texture2D(samplerU,vTextureCoord).r - 0.5;\n" +
                        "    yuv.z = texture2D(samplerV,vTextureCoord).r - 0.5;\n" +
                        "    gl_FragColor = vec4(yuv2rgb*yuv,1);\n" +
                        "}";
    }

    // Native NV21 -> planar Y/U/V splitter (implemented in the JNI library).
    @SuppressWarnings("all")
    private static native void NV21TOYUV(byte[] src, byte[] dstY, byte[] dstU, byte[] dstV, int width, int height);
}

View File

@@ -0,0 +1,14 @@
package me.lake.librestreaming.render;
import android.graphics.SurfaceTexture;
/**
 * Abstraction over the soft-mode preview renderers (native window vs GLES).
 * Implementations: NativeRender, GLESRender.
 */
public interface IRender {
    /**
     * Prepares the renderer for the given output surface and frame geometry.
     *
     * @param visualSurfaceTexture surface to draw into
     * @param pixelFormat          input pixel format (implementations require NV21)
     * @param pixelWidth           frame width in pixels
     * @param pixelHeight          frame height in pixels
     * @param visualWidth          visible surface width
     * @param visualHeight         visible surface height
     */
    void create(SurfaceTexture visualSurfaceTexture, int pixelFormat, int pixelWidth, int pixelHeight, int visualWidth, int visualHeight);
    /** Notifies the renderer that the visible surface was resized. */
    void update(int visualWidth, int visualHeight);
    /** Draws one frame of raw pixel data. */
    void rendering(byte[] pixel);
    /**
     * Tears the renderer down.
     *
     * @param releaseTexture whether to release the underlying surface too
     */
    void destroy(boolean releaseTexture);
}

View File

@@ -0,0 +1,50 @@
package me.lake.librestreaming.render;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.view.Surface;
import me.lake.librestreaming.tools.LogTools;
/**
 * {@link IRender} implementation that blits NV21 frames into the preview
 * surface through a native (JNI) rendering routine.
 */
public class NativeRender implements IRender {
    Surface mVisualSurface;
    int mPixelWidth;
    int mPixelHeight;
    int mPixelSize;  // total NV21 frame size in bytes

    @Override
    public void create(SurfaceTexture visualSurfaceTexture, int pixelFormat, int pixelWidth, int pixelHeight, int visualWidth, int visualHeight) {
        if (pixelFormat != ImageFormat.NV21) {
            throw new IllegalArgumentException("NativeRender,pixelFormat only support NV21");
        }
        mVisualSurface = new Surface(visualSurfaceTexture);
        mPixelWidth = pixelWidth;
        mPixelHeight = pixelHeight;
        // NV21: full-res Y plane plus half-size interleaved VU plane.
        mPixelSize = (3 * pixelWidth * pixelHeight) / 2;
    }

    @Override
    public void update(int visualWidth, int visualHeight) {
        // Native path scales to the surface automatically; nothing to do here.
    }

    @Override
    public void rendering(byte[] pixel) {
        if (mVisualSurface != null && mVisualSurface.isValid()) {
            renderingSurface(mVisualSurface, pixel, mPixelWidth, mPixelHeight, mPixelSize);
        } else {
            LogTools.d("NativeRender,rendering()invalid Surface");
        }
    }

    @Override
    public void destroy(boolean releaseTexture) {
        // Guard against destroy() before create() or a repeated destroy(),
        // which would otherwise throw a NullPointerException.
        if (releaseTexture && mVisualSurface != null) {
            mVisualSurface.release();
        }
    }

    // Native NV21 blit into the surface (implemented in the JNI library).
    @SuppressWarnings("all")
    private native void renderingSurface(Surface surface, byte[] pixels, int w, int h, int s);
}

View File

@@ -0,0 +1,124 @@
package me.lake.librestreaming.rtmp;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;

import me.lake.librestreaming.model.RESCoreParameters;
/**
* This class is able to generate a FLVTAG in accordance with Adobe Flash Video File Format
* Specification v10.1 Annex E.5 with limited types available.
*/
/**
 * This class is able to generate a FLVTAG in accordance with Adobe Flash
 * Video File Format Specification v10.1 Annex E.5 with limited types
 * available (AMF0 number and string properties inside an ECMA array).
 */
public class FLvMetaData {
    private static final String Name = "onMetaData";
    private static final int ScriptData = 18;
    private static final byte[] TS_SID = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};
    private static final byte[] ObjEndMarker = {0x00, 0x00, 0x09};
    // Fixed overhead of an empty tag body:
    // string marker(1) + "onMetaData" string(12) + ECMA marker(1) + count(4) + end marker(3) = 21.
    private static final int EmptySize = 21;
    private ArrayList<byte[]> MetaData;  // pre-encoded properties, in insertion order
    private int DataSize;                // total byte length of all properties
    private int pointer;                 // write cursor into MetaDataFrame
    private byte[] MetaDataFrame;

    public FLvMetaData() {
        MetaData = new ArrayList<>();
        DataSize = 0;
    }

    /**
     * Builds the standard onMetaData properties from the stream parameters.
     *
     * @param coreParameters negotiated audio/video settings
     */
    public FLvMetaData(RESCoreParameters coreParameters) {
        this();
        //Audio
        //AAC
        setProperty("audiocodecid", 10);
        switch (coreParameters.mediacodecAACBitRate) {
            case 32 * 1024:
                setProperty("audiodatarate", 32);
                break;
            case 48 * 1024:
                setProperty("audiodatarate", 48);
                break;
            case 64 * 1024:
                setProperty("audiodatarate", 64);
                break;
        }
        switch (coreParameters.mediacodecAACSampleRate) {
            case 44100:
                setProperty("audiosamplerate", 44100);
                break;
            default:
                break;
        }
        //Video
        //h264
        setProperty("videocodecid", 7);
        setProperty("framerate", coreParameters.mediacodecAVCFrameRate);
        setProperty("width", coreParameters.videoWidth);
        setProperty("height", coreParameters.videoHeight);
    }

    /** Adds a numeric (AMF0 type 0) property. */
    public void setProperty(String Key, int value) {
        addProperty(toFlvString(Key), (byte) 0, toFlvNum(value));
    }

    /** Adds a string (AMF0 type 2) property. */
    public void setProperty(String Key, String value) {
        addProperty(toFlvString(Key), (byte) 2, toFlvString(value));
    }

    private void addProperty(byte[] Key, byte datatype, byte[] data) {
        int Propertysize = Key.length + 1 + data.length;
        byte[] Property = new byte[Propertysize];
        System.arraycopy(Key, 0, Property, 0, Key.length);
        Property[Key.length] = datatype;
        System.arraycopy(data, 0, Property, Key.length + 1, data.length);
        MetaData.add(Property);
        DataSize += Propertysize;
    }

    /**
     * Serializes all accumulated properties into one SCRIPTDATA tag body.
     *
     * @return the encoded onMetaData payload
     */
    public byte[] getMetaData() {
        MetaDataFrame = new byte[DataSize + EmptySize];
        pointer = 0;
        //SCRIPTDATA.name
        Addbyte(2);
        AddbyteArray(toFlvString(Name));
        //SCRIPTDATA.value ECMA array
        Addbyte(8);
        AddbyteArray(toUI(MetaData.size(), 4));
        for (byte[] Property : MetaData) {
            AddbyteArray(Property);
        }
        AddbyteArray(ObjEndMarker);
        return MetaDataFrame;
    }

    private void Addbyte(int value) {
        MetaDataFrame[pointer] = (byte) value;
        pointer++;
    }

    private void AddbyteArray(byte[] value) {
        System.arraycopy(value, 0, MetaDataFrame, pointer, value.length);
        pointer += value.length;
    }

    /**
     * Encodes text as an AMF0 string body: UI16 byte length followed by the
     * UTF-8 bytes.
     * <p>
     * Fix: the previous version sized the array by {@code text.length()}
     * (char count) but filled it with {@code text.getBytes()} (platform
     * default charset) — for any non-ASCII text that overflowed the array
     * and wrote a wrong length field. Now the encoded byte length is used
     * and the charset is explicit.
     */
    private byte[] toFlvString(String text) {
        byte[] utf8 = text.getBytes(StandardCharsets.UTF_8);
        byte[] FlvString = new byte[utf8.length + 2];
        System.arraycopy(toUI(utf8.length, 2), 0, FlvString, 0, 2);
        System.arraycopy(utf8, 0, FlvString, 2, utf8.length);
        return FlvString;
    }

    /** Big-endian unsigned integer occupying {@code bytes} bytes. */
    private byte[] toUI(long value, int bytes) {
        byte[] UI = new byte[bytes];
        for (int i = 0; i < bytes; i++) {
            UI[bytes - 1 - i] = (byte) (value >> (8 * i) & 0xff);
        }
        return UI;
    }

    /** AMF0 number: IEEE-754 double, big-endian. */
    private byte[] toFlvNum(double value) {
        long tmp = Double.doubleToLongBits(value);
        return toUI(tmp, 8);
    }
}

View File

@@ -0,0 +1,27 @@
package me.lake.librestreaming.rtmp;
/**
 * One FLV tag worth of encoded data queued for the RTMP sender.
 */
public class RESFlvData {
    public final static int FLV_RTMP_PACKET_TYPE_VIDEO = 9;
    public final static int FLV_RTMP_PACKET_TYPE_AUDIO = 8;
    public final static int FLV_RTMP_PACKET_TYPE_INFO = 18;
    public final static int NALU_TYPE_IDR = 5;
    /** Whether the sender may drop this tag under backpressure. */
    public boolean droppable;
    /** Decode timestamp (milliseconds). */
    public int dts;
    /** Tag payload bytes. */
    public byte[] byteBuffer;
    /** Payload length in bytes. */
    public int size;
    /** FLV_RTMP_PACKET_TYPE_* discriminator (audio vs video vs info). */
    public int flvTagType;
    /** NALU type of a video tag; {@link #NALU_TYPE_IDR} marks a keyframe. */
    public int videoFrameType;

    /** @return true when this tag carries an IDR (key) video frame. */
    public boolean isKeyframe() {
        return NALU_TYPE_IDR == videoFrameType;
    }
}

View File

@@ -0,0 +1,6 @@
package me.lake.librestreaming.rtmp;
/**
 * Sink for encoded FLV tags produced by the audio/video encoders.
 */
public interface RESFlvDataCollecter {
    /**
     * @param flvData one encoded FLV tag
     * @param type    tag category — presumably one of
     *                RESFlvData.FLV_RTMP_PACKET_TYPE_*; confirm at call sites
     */
    void collect(RESFlvData flvData, int type);
}

View File

@@ -0,0 +1,308 @@
package me.lake.librestreaming.rtmp;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import me.lake.librestreaming.client.CallbackDelivery;
import me.lake.librestreaming.core.RESByteSpeedometer;
import me.lake.librestreaming.core.RESFrameRateMeter;
import me.lake.librestreaming.core.listener.RESConnectionListener;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.tools.LogTools;
public class RESRtmpSender {
private static final int TIMEGRANULARITY = 3000;
public static final int FROM_AUDIO = 8;
public static final int FROM_VIDEO = 6;
private WorkHandler workHandler;
private HandlerThread workHandlerThread;
private final Object syncOp = new Object();
/**
 * Spins up the sender's dedicated handler thread. Must be called before
 * {@link #start(String)} / {@link #feed(RESFlvData, int)}.
 *
 * @param coreParameters stream settings; supplies the send-queue length and
 *                       the pre-built onMetaData FLV tag
 */
public void prepare(RESCoreParameters coreParameters) {
    synchronized (syncOp) {
        workHandlerThread = new HandlerThread("RESRtmpSender,workHandlerThread");
        workHandlerThread.start();
        workHandler = new WorkHandler(coreParameters.senderQueueLength,
                new FLvMetaData(coreParameters),
                workHandlerThread.getLooper());
    }
}
/**
 * Registers the listener that receives connection result/error callbacks.
 * NOTE(review): dereferences workHandler without a null check, unlike the
 * getters — calling this before prepare() throws NPE.
 */
public void setConnectionListener(RESConnectionListener connectionListener) {
    synchronized (syncOp) {
        workHandler.setConnectionListener(connectionListener);
    }
}
/** @return the RTMP server IP reported by the worker, or null before prepare()/connect. */
public String getServerIpAddr() {
    synchronized (syncOp) {
        return workHandler == null ? null : workHandler.getServerIpAddr();
    }
}
/** @return measured outgoing frame rate, or 0 before prepare(). */
public float getSendFrameRate() {
    synchronized (syncOp) {
        return workHandler == null ? 0 : workHandler.getSendFrameRate();
    }
}
public float getSendBufferFreePercent() {
synchronized (syncOp) {
return workHandler == null ? 0 : workHandler.getSendBufferFreePercent();
}
}
public void start(String rtmpAddr) {
synchronized (syncOp) {
workHandler.sendStart(rtmpAddr);
}
}
public void feed(RESFlvData flvData, int type) {
synchronized (syncOp) {
workHandler.sendFood(flvData, type);
}
}
public void stop() {
synchronized (syncOp) {
workHandler.sendStop();
}
}
public void destroy() {
synchronized (syncOp) {
workHandler.removeCallbacksAndMessages(null);
//workHandlerThread.quit();
workHandler.sendStop();
workHandlerThread.quitSafely();
/**
* do not wait librtmp to quit
*/
// try {
// workHandlerThread.join();
// } catch (InterruptedException ignored) {
// }
}
}
public int getTotalSpeed() {
synchronized (syncOp) {
if (workHandler != null) {
return workHandler.getTotalSpeed();
} else {
return 0;
}
}
}
public WorkHandler getWorkHandler(){
return workHandler;
}
public static class WorkHandler extends Handler {
private final static int MSG_START = 1;
private final static int MSG_WRITE = 2;
private final static int MSG_STOP = 3;
private long jniRtmpPointer = 0;
private String serverIpAddr = null;
private int maxQueueLength;
private int writeMsgNum = 0;
private final Object syncWriteMsgNum = new Object();
private RESByteSpeedometer videoByteSpeedometer = new RESByteSpeedometer(TIMEGRANULARITY);
private RESByteSpeedometer audioByteSpeedometer = new RESByteSpeedometer(TIMEGRANULARITY);
private RESFrameRateMeter sendFrameRateMeter = new RESFrameRateMeter();
private FLvMetaData fLvMetaData;
private RESConnectionListener connectionListener;
private final Object syncConnectionListener = new Object();
private int errorTime = 0;
private enum STATE {
IDLE,
RUNNING,
STOPPED
}
private STATE state;
WorkHandler(int maxQueueLength, FLvMetaData fLvMetaData, Looper looper) {
super(looper);
this.maxQueueLength = maxQueueLength;
this.fLvMetaData = fLvMetaData;
state = STATE.IDLE;
}
public String getServerIpAddr() {
return serverIpAddr;
}
public float getSendFrameRate() {
return sendFrameRateMeter.getFps();
}
public float getSendBufferFreePercent() {
synchronized (syncWriteMsgNum) {
float res = (float) (maxQueueLength - writeMsgNum) / (float) maxQueueLength;
return res <= 0 ? 0f : res;
}
}
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case MSG_START:
if (state == STATE.RUNNING) {
break;
}
sendFrameRateMeter.reSet();
LogTools.d("RESRtmpSender,WorkHandler,tid=" + Thread.currentThread().getId());
jniRtmpPointer = RtmpClient.open((String) msg.obj, true);
final int openR = jniRtmpPointer == 0 ? 1 : 0;
if (openR == 0) {
serverIpAddr = RtmpClient.getIpAddr(jniRtmpPointer);
}
synchronized (syncConnectionListener) {
if (connectionListener != null) {
CallbackDelivery.i().post(new Runnable() {
@Override
public void run() {
connectionListener.onOpenConnectionResult(openR);
}
});
}
}
if (jniRtmpPointer == 0) {
break;
} else {
byte[] MetaData = fLvMetaData.getMetaData();
RtmpClient.write(jniRtmpPointer,
MetaData,
MetaData.length,
RESFlvData.FLV_RTMP_PACKET_TYPE_INFO, 0);
state = STATE.RUNNING;
}
break;
case MSG_STOP:
if (state == STATE.STOPPED || jniRtmpPointer == 0) {
break;
}
errorTime = 0;
final int closeR = RtmpClient.close(jniRtmpPointer);
serverIpAddr = null;
synchronized (syncConnectionListener) {
if (connectionListener != null) {
CallbackDelivery.i().post(new Runnable() {
@Override
public void run() {
connectionListener.onCloseConnectionResult(closeR);
}
});
}
}
state = STATE.STOPPED;
break;
case MSG_WRITE:
synchronized (syncWriteMsgNum) {
--writeMsgNum;
}
if (state != STATE.RUNNING) {
break;
}
if(mListener!=null){
mListener.getBufferFree(getSendBufferFreePercent());
}
RESFlvData flvData = (RESFlvData) msg.obj;
if (writeMsgNum >= (maxQueueLength * 3 / 4) && flvData.flvTagType == RESFlvData.FLV_RTMP_PACKET_TYPE_VIDEO && flvData.droppable) {
LogTools.d("senderQueue is crowded,abandon video");
break;
}
final int res = RtmpClient.write(jniRtmpPointer, flvData.byteBuffer, flvData.byteBuffer.length, flvData.flvTagType, flvData.dts);
if (res == 0) {
errorTime = 0;
if (flvData.flvTagType == RESFlvData.FLV_RTMP_PACKET_TYPE_VIDEO) {
videoByteSpeedometer.gain(flvData.size);
sendFrameRateMeter.count();
} else {
audioByteSpeedometer.gain(flvData.size);
}
} else {
++errorTime;
synchronized (syncConnectionListener) {
if (connectionListener != null) {
CallbackDelivery.i().post(new RESConnectionListener.RESWriteErrorRunable(connectionListener, res));
}
}
}
break;
default:
break;
}
}
public void sendStart(String rtmpAddr) {
this.removeMessages(MSG_START);
synchronized (syncWriteMsgNum) {
this.removeMessages(MSG_WRITE);
writeMsgNum = 0;
}
this.sendMessage(this.obtainMessage(MSG_START, rtmpAddr));
}
public void sendStop() {
this.removeMessages(MSG_STOP);
synchronized (syncWriteMsgNum) {
this.removeMessages(MSG_WRITE);
writeMsgNum = 0;
}
this.sendEmptyMessage(MSG_STOP);
}
public void sendFood(RESFlvData flvData, int type) {
synchronized (syncWriteMsgNum) {
//LAKETODO optimize
if (writeMsgNum <= maxQueueLength) {
this.sendMessage(this.obtainMessage(MSG_WRITE, type, 0, flvData));
++writeMsgNum;
} else {
LogTools.d("senderQueue is full,abandon");
}
}
}
public void setConnectionListener(RESConnectionListener connectionListener) {
synchronized (syncConnectionListener) {
this.connectionListener = connectionListener;
}
}
public int getTotalSpeed() {
return getVideoSpeed() + getAudioSpeed();
}
public int getVideoSpeed() {
return videoByteSpeedometer.getSpeed();
}
public int getAudioSpeed() {
return audioByteSpeedometer.getSpeed();
}
private BufferFreeListener mListener=null;
public interface BufferFreeListener{
void getBufferFree(float free);
}
public void setBufferFreeListener(BufferFreeListener listener){
mListener=listener;
}
}
}

View File

@@ -0,0 +1,25 @@
package me.lake.librestreaming.rtmp;
/**
 * Thin JNI binding to the native librtmp wrapper ("resrtmp").
 * All methods operate on an opaque native pointer returned by {@link #open}.
 */
public class RtmpClient {
    static {
        // Loads libresrtmp.so; throws UnsatisfiedLinkError if the ABI is unsupported.
        System.loadLibrary("resrtmp");
    }
    /**
     * @param url
     * @param isPublishMode
     * @return rtmpPointer ,pointer to native rtmp struct
     */
    public static native long open(String url, boolean isPublishMode);
    /** Reads up to {@code size} bytes into {@code data}; semantics defined by the native side. */
    public static native int read(long rtmpPointer, byte[] data, int offset, int size);
    /** Writes one FLV tag; {@code type} is the FLV tag type, {@code ts} the timestamp. Returns 0 on success. */
    public static native int write(long rtmpPointer, byte[] data, int size, int type, int ts);
    /** Closes the connection and frees the native struct; the pointer must not be reused. */
    public static native int close(long rtmpPointer);
    /** @return the peer's IP address for an open connection. */
    public static native String getIpAddr(long rtmpPointer);
}

View File

@@ -0,0 +1,19 @@
package me.lake.librestreaming.tools;
import android.graphics.ImageFormat;
import android.media.MediaCodecInfo;
public class BuffSizeCalculator {
public static int calculator(int width, int height, int colorFormat) {
switch (colorFormat) {
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
case ImageFormat.NV21:
case ImageFormat.YV12:
return width * height * 3 / 2;
default:
return -1;
}
}
}

View File

@@ -0,0 +1,16 @@
package me.lake.librestreaming.tools;
/**
 * Helpers for writing integers into byte arrays in big-endian (network) order.
 */
public class ByteArrayTools {
    /**
     * Writes {@code value} into {@code dst} as 4 big-endian bytes starting at {@code pos}.
     *
     * @param dst   destination array; must have at least {@code pos + 4} elements
     * @param pos   index of the first (most significant) byte
     * @param value the 32-bit value to encode
     */
    public static void intToByteArrayFull(byte[] dst, int pos, int value) {
        dst[pos] = (byte) ((value >> 24) & 0xFF);
        dst[pos + 1] = (byte) ((value >> 16) & 0xFF);
        dst[pos + 2] = (byte) ((value >> 8) & 0xFF);
        dst[pos + 3] = (byte) (value & 0xFF);
    }

    /**
     * Writes the low 16 bits of {@code value} into {@code dst} as 2 big-endian bytes
     * starting at {@code pos}; higher bits are silently discarded.
     *
     * @param dst   destination array; must have at least {@code pos + 2} elements
     * @param pos   index of the first (most significant) byte
     * @param value the value whose low 16 bits are encoded
     */
    public static void intToByteArrayTwoByte(byte[] dst, int pos, int value) {
        dst[pos] = (byte) ((value >> 8) & 0xFF);
        dst[pos + 1] = (byte) (value & 0xFF);
    }
}

View File

@@ -0,0 +1,205 @@
package me.lake.librestreaming.tools;
import android.app.Activity;
import android.content.Context;
import android.content.res.Configuration;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.Log;
import android.view.Surface;
import java.lang.ref.WeakReference;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
* Created by WangShuo on 2017/6/30.
*/
/**
 * Camera-size selection helpers and display-orientation utilities.
 *
 * NOTE(review): {@link #getInstance()} is a lazy singleton without synchronization —
 * harmless if only ever touched from the main thread; confirm callers.
 */
public class CameraUtil {
    private static final String tag = "wangshuo";
    // Ascending-by-width comparator, used for preview-size selection.
    private CameraSizeComparator sizeComparator = new CameraSizeComparator();
    // Descending-by-width comparator, used for video-size selection.
    private CameraSizeComparator2 sizeComparator2 = new CameraSizeComparator2();
    private static CameraUtil cameraUtil = null;
    // Set to false by getFrontCameraSize() when the device reports no video sizes.
    public static boolean hasSupportedFrontVideoSizes = true;

    private CameraUtil(){
    }

    public static CameraUtil getInstance(){
        if(cameraUtil == null){
            cameraUtil = new CameraUtil();
            return cameraUtil;
        }
        else{
            return cameraUtil;
        }
    }

    /**
     * Picks the largest size whose width is below {@code th} (and above 350)
     * with roughly 16:9 aspect; returns null when no candidate matches.
     */
    public Size getBestSize(List<Size> list, int th){
        if(list == null || list.size() < 1){
            return null;
        }
        boolean bool= false;
        // Sort descending so the first match is the largest acceptable size.
        Collections.sort(list, sizeComparator2);
        int i = 0;
        for(Size s:list){
            if((s.width < th) && (s.width > 350) && equalRate(s, 1.7777f)){
                Log.i(tag, "最终设置Video尺寸:w = " + s.width + "h = " + s.height);
                bool = true;
                break;
            }
            i++;
        }
        if(bool){
            return list.get(i);
        }
        return null;
    }

    /**
     * Picks the smallest size whose width exceeds {@code th} with roughly 16:9
     * aspect; returns null when no candidate matches.
     */
    public Size getBestPreviewSize(List<Size> list, int th){
        if(list == null || list.size() < 1){
            return null;
        }
        boolean bool= false;
        // Sort ascending so the first match is the smallest size above the threshold.
        Collections.sort(list, sizeComparator);
        int i = 0;
        for(Size s:list){
            if((s.width > th) && equalRate(s, 1.7777f)){
                Log.i(tag, "最终设置预览尺寸:w = " + s.width + "h = " + s.height);
                bool = true;
                break;
            }
            i++;
        }
        if(bool){
            return list.get(i);
        }
        return null;
    }

    /** @return true when width/height is within 0.2 of {@code rate}. */
    public boolean equalRate(Size s, float rate){
        float r = (float)(s.width)/(float)(s.height);
        if(Math.abs(r - rate) <= 0.2)
        {
            return true;
        }
        else{
            return false;
        }
    }

    /** Sorts sizes by width, ascending. */
    public class CameraSizeComparator implements Comparator<Size> {
        public int compare(Size lhs, Size rhs) {
            // TODO Auto-generated method stub
            if(lhs.width == rhs.width){
                return 0;
            }
            else if(lhs.width > rhs.width){
                return 1;
            }
            else{
                return -1;
            }
        }
    }

    /** Sorts sizes by width, descending. */
    public class CameraSizeComparator2 implements Comparator<Size> {
        public int compare(Size lhs, Size rhs) {
            // TODO Auto-generated method stub
            if(lhs.width == rhs.width){
                return 0;
            }
            else if(lhs.width < rhs.width){
                return 1;
            }
            else{
                return -1;
            }
        }
    }

    // Each helper below briefly opens the camera just to query its sizes, then
    // releases it. NOTE(review): Camera.open can throw if the camera is in use.
    public static List<Size> getBackCameraPreviewSize(){
        Camera back = Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
        List<Size> backSizeList = back.getParameters().getSupportedPreviewSizes();
        back.release();
        return backSizeList;
    }

    public static List<Size> getFrontCameraPreviewSize(){
        Camera front = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
        List<Size> frontSizeList = front.getParameters().getSupportedPreviewSizes();
        front.release();
        return frontSizeList;
    }

    public static List<Size> getBackCameraVideoSize(){
        Camera back = Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
        List<Size> backSizeList = back.getParameters().getSupportedVideoSizes();
        back.release();
        return backSizeList;
    }

    public static List<Size> getFrontCameraVideoSize(){
        Camera front = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
        List<Size> frontSizeList = front.getParameters().getSupportedVideoSizes();
        front.release();
        return frontSizeList;
    }

    /**
     * Returns the front camera's supported video sizes, falling back to preview
     * sizes (and flagging {@link #hasSupportedFrontVideoSizes}) when video sizes
     * are unavailable on this device.
     */
    public static List<Size> getFrontCameraSize(){
        Camera front = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
        List<Size> frontSizeList = front.getParameters().getSupportedVideoSizes();
        if(null == frontSizeList || frontSizeList.size()<=0){
            frontSizeList = front.getParameters().getSupportedPreviewSizes();
            hasSupportedFrontVideoSizes = false;
            Log.e(tag,"getSupportedVideoSizes==null");
        }
        front.release();
        return frontSizeList;
    }

    /**
     * Determines whether the device is a tablet (large/xlarge screen) rather than
     * a phone; logic borrowed from the Google I/O app.
     *
     * @param context any context
     * @return true for tablets, false for phones
     */
    public static boolean isTablet(Context context) {
        return (context.getResources().getConfiguration().screenLayout & Configuration.SCREENLAYOUT_SIZE_MASK) >= Configuration.SCREENLAYOUT_SIZE_LARGE;
    }

    /**
     * Applies the standard Android camera-display-orientation compensation:
     * combines the sensor's mounting orientation with the current display
     * rotation (mirrored for the front camera) and passes the result to
     * {@link Camera#setDisplayOrientation(int)}.
     */
    public static void setCameraDisplayOrientation(Activity activity,
                                                   int cameraId, android.hardware.Camera camera) {
        android.hardware.Camera.CameraInfo info =
                new android.hardware.Camera.CameraInfo();
        android.hardware.Camera.getCameraInfo(cameraId, info);
        int rotation = activity.getWindowManager().getDefaultDisplay()
                .getRotation();
        int degrees = 0;
        switch (rotation) {
            case Surface.ROTATION_0: degrees = 0; break;
            case Surface.ROTATION_90: degrees = 90; break;
            case Surface.ROTATION_180: degrees = 180; break;
            case Surface.ROTATION_270: degrees = 270; break;
        }
        int result;
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            result = (info.orientation + degrees) % 360;
            result = (360 - result) % 360;  // compensate the mirror
        } else {  // back-facing
            result = (info.orientation - degrees + 360) % 360;
        }
        //result=180;
        System.out.println("设置相机角度:"+result);
        camera.setDisplayOrientation(result);
    }
}

View File

@@ -0,0 +1,125 @@
package me.lake.librestreaming.tools;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
/**
 * OpenGL ES 2.0 helpers: shader/program creation, texture loading, FBO setup,
 * and error checking. All methods must be called on a thread with a current GL context.
 */
public class GLESTools {
    public static int FLOAT_SIZE_BYTES = 4;
    public static int SHORT_SIZE_BYTES = 2;

    /**
     * Reads a raw text resource into a String (lines rejoined with '\n').
     *
     * @return file contents, or null on read failure
     */
    public static String readTextFile(Resources res, int resId) {
        InputStream inputStream = res.openRawResource(resId);
        BufferedReader br = new BufferedReader(new InputStreamReader(inputStream));
        String line;
        StringBuilder result = new StringBuilder();
        try {
            while ((line = br.readLine()) != null) {
                result.append(line);
                result.append("\n");
            }
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
        return result.toString();
    }

    /** Loads shader sources from raw resources, then compiles and links them. */
    public static int createProgram(Resources res, int vertexShaderResId, int fragmentShaderResId) {
        String vertexShaderCode = readTextFile(res, vertexShaderResId);
        String fragmentShaderCode = readTextFile(res, fragmentShaderResId);
        return createProgram(vertexShaderCode, fragmentShaderCode);
    }

    /**
     * Compiles both shaders and links them into a program.
     *
     * @return the GL program handle
     * @throws RuntimeException on null sources, compile failure, or link failure
     */
    public static int createProgram(String vertexShaderCode, String fragmentShaderCode) {
        if (vertexShaderCode == null || fragmentShaderCode == null) {
            throw new RuntimeException("invalid shader code");
        }
        int vertexShader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
        int fragmentShader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
        GLES20.glShaderSource(vertexShader, vertexShaderCode);
        GLES20.glShaderSource(fragmentShader, fragmentShaderCode);
        int[] status = new int[1];
        GLES20.glCompileShader(vertexShader);
        GLES20.glGetShaderiv(vertexShader, GLES20.GL_COMPILE_STATUS, status, 0);
        if (GLES20.GL_FALSE == status[0]) {
            throw new RuntimeException("vertext shader compile,failed:" + GLES20.glGetShaderInfoLog(vertexShader));
        }
        GLES20.glCompileShader(fragmentShader);
        GLES20.glGetShaderiv(fragmentShader, GLES20.GL_COMPILE_STATUS, status, 0);
        if (GLES20.GL_FALSE == status[0]) {
            throw new RuntimeException("fragment shader compile,failed:" + GLES20.glGetShaderInfoLog(fragmentShader));
        }
        int program = GLES20.glCreateProgram();
        GLES20.glAttachShader(program, vertexShader);
        GLES20.glAttachShader(program, fragmentShader);
        GLES20.glLinkProgram(program);
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0);
        if (GLES20.GL_FALSE == status[0]) {
            throw new RuntimeException("link program,failed:" + GLES20.glGetProgramInfoLog(program));
        }
        return program;
    }

    /**
     * Throws (and logs) if the GL error flag is set; {@code op} names the call
     * just performed for the error message.
     */
    public static void checkGlError(String op) {
        int error = GLES20.glGetError();
        if (error != GLES20.GL_NO_ERROR) {
            String msg = op + ": glError 0x" + Integer.toHexString(error);
            LogTools.d(msg);
            throw new RuntimeException(msg);
        }
    }

    // Sentinel meaning "allocate a new texture" in loadTexture().
    public static final int NO_TEXTURE = -1;

    /**
     * Uploads {@code image} into a 2D texture. With {@code reUseTexture == NO_TEXTURE}
     * a new texture is created (linear filtering, clamp-to-edge); otherwise the
     * bitmap is uploaded into the existing texture via texSubImage2D — the caller
     * must ensure dimensions still match.
     *
     * @return the texture handle used
     */
    public static int loadTexture(final Bitmap image, final int reUseTexture) {
        int[] texture = new int[1];
        if (reUseTexture == NO_TEXTURE) {
            GLES20.glGenTextures(1, texture, 0);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture[0]);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
            GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
            GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, image, 0);
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        } else {
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, reUseTexture);
            GLUtils.texSubImage2D(GLES20.GL_TEXTURE_2D, 0, 0, 0, image);
            texture[0] = reUseTexture;
        }
        return texture[0];
    }

    /**
     * Creates a framebuffer with an RGBA color-attachment texture of the given
     * size; handles are written into {@code frameBuffer[0]} / {@code frameBufferTex[0]}.
     * Leaves texture and framebuffer bindings reset to 0 on exit.
     */
    public static void createFrameBuff(int[] frameBuffer, int[] frameBufferTex, int width, int height) {
        GLES20.glGenFramebuffers(1, frameBuffer, 0);
        GLES20.glGenTextures(1, frameBufferTex, 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, frameBufferTex[0]);
        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
        GLESTools.checkGlError("createCamFrameBuff");
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer[0]);
        GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, frameBufferTex[0], 0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
        GLESTools.checkGlError("createCamFrameBuff");
    }
}

View File

@@ -0,0 +1,72 @@
package me.lake.librestreaming.tools;
import android.util.Log;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.io.Writer;
import java.net.UnknownHostException;
/**
 * Global, toggleable logging facade over {@link Log}. All output is suppressed
 * until {@link #setEnableLog(boolean)} turns logging on.
 */
public class LogTools {
    protected static final String TAG = "RESLog";
    private static boolean enableLog = false;

    /** @return whether logging is currently enabled. */
    public static boolean isEnableLog() {
        return enableLog;
    }

    /** Globally enables or disables all LogTools output. */
    public static void setEnableLog(boolean enableLog) {
        LogTools.enableLog = enableLog;
    }

    /** Error-level log, dropped when logging is disabled. */
    public static void e(String content) {
        if (enableLog) {
            Log.e(TAG, content);
        }
    }

    /** Debug-level log, dropped when logging is disabled. */
    public static void d(String content) {
        if (enableLog) {
            Log.d(TAG, content);
        }
    }

    /** Logs {@code msg} together with the current call stack. */
    public static void trace(String msg) {
        if (enableLog) {
            trace(msg, new Throwable());
        }
    }

    /** Logs a throwable's stack trace with a default message. */
    public static void trace(Throwable e) {
        if (enableLog) {
            trace(null, e);
        }
    }

    /**
     * Logs {@code msg} plus the full stack trace of {@code e}.
     * Silently ignored when logging is off, {@code e} is null, or {@code e}
     * is an UnknownHostException (too noisy / uninteresting).
     */
    public static void trace(String msg, Throwable e) {
        if (!enableLog || null == e || e instanceof UnknownHostException) {
            return;
        }
        Writer sw = new StringWriter();
        PrintWriter pw = new PrintWriter(sw);
        e.printStackTrace(pw);
        String stackTrace = sw.toString();
        String header = (null == msg || msg.equals(""))
                ? "================error!=================="
                : msg;
        Log.e(TAG, "==================================");
        Log.e(TAG, header);
        Log.e(TAG, stackTrace);
        Log.e(TAG, "-----------------------------------");
    }
}

View File

@@ -0,0 +1,91 @@
package me.lake.librestreaming.ws;
import android.content.Context;
import android.util.AttributeSet;
import android.view.TextureView;
import android.view.View;
/**
 * A {@link TextureView} that enforces a target aspect ratio during measuring
 * and centers itself inside its parent during layout.
 */
public class AspectTextureView extends TextureView {
    // Stretch to fill both dimensions, ignoring aspect mismatch.
    public static final int MODE_FITXY = 0;
    // Shrink one dimension so the whole video fits inside the view (letterbox).
    public static final int MODE_INSIDE = 1;
    // Grow to cover the view; parts of the video may be cropped.
    public static final int MODE_OUTSIDE = 2;
    // Target width/height ratio; <= 0 means "no aspect constraint yet".
    private double targetAspect = -1;
    private int aspectMode = MODE_OUTSIDE;

    public AspectTextureView(Context context) {
        super(context);
    }

    public AspectTextureView(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public AspectTextureView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
    }

    /**
     * @param mode {@link #MODE_FITXY},{@link #MODE_INSIDE},{@link #MODE_OUTSIDE}
     * @param aspectRatio width/height
     */
    public void setAspectRatio(int mode, double aspectRatio) {
        if (mode != MODE_INSIDE && mode != MODE_OUTSIDE && mode != MODE_FITXY) {
            throw new IllegalArgumentException("illegal mode");
        }
        if (aspectRatio < 0) {
            throw new IllegalArgumentException("illegal aspect ratio");
        }
        // Only relayout when something actually changed.
        if (targetAspect != aspectRatio || aspectMode != mode) {
            targetAspect = aspectRatio;
            aspectMode = mode;
            requestLayout();
        }
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        if (targetAspect > 0) {
            int initialWidth = MeasureSpec.getSize(widthMeasureSpec);
            int initialHeight = MeasureSpec.getSize(heightMeasureSpec);
            double viewAspectRatio = (double) initialWidth / initialHeight;
            // Relative aspect error: > 0 means the target is wider than the view.
            double aspectDiff = targetAspect / viewAspectRatio - 1;
            // Within 1% of target (or FITXY): accept the given specs unchanged.
            if (Math.abs(aspectDiff) > 0.01 && aspectMode != MODE_FITXY) {
                if (aspectMode == MODE_INSIDE) {
                    if (aspectDiff > 0) {
                        initialHeight = (int) (initialWidth / targetAspect);
                    } else {
                        initialWidth = (int) (initialHeight * targetAspect);
                    }
                } else if (aspectMode == MODE_OUTSIDE) {
                    if (aspectDiff > 0) {
                        initialWidth = (int) (initialHeight * targetAspect);
                    } else {
                        initialHeight = (int) (initialWidth / targetAspect);
                    }
                }
                widthMeasureSpec = MeasureSpec.makeMeasureSpec(initialWidth, MeasureSpec.EXACTLY);
                heightMeasureSpec = MeasureSpec.makeMeasureSpec(initialHeight, MeasureSpec.EXACTLY);
            }
        }
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
    }

    @Override
    public void layout(int l, int t, int r, int b) {
        // NOTE(review): deliberately ignores the incoming l/t and recenters this
        // view within its parent using measured sizes — confirm this is the
        // intended behavior for all containers this view is placed in.
        View p = (View) getParent();
        if (p != null) {
            int pw = p.getMeasuredWidth();
            int ph = p.getMeasuredHeight();
            int w = getMeasuredWidth();
            int h = getMeasuredHeight();
            t = (ph - h) / 2;
            l = (pw - w) / 2;
            r += l;
            b += t;
        }
        super.layout(l, t, r, b);
    }
}

View File

@@ -0,0 +1,21 @@
package me.lake.librestreaming.ws;
/**
* Created by WangShuo on 2017/6/11.
*/
/**
 * Audio/video streaming options, defaulted from {@link StreamConfig.AVOptionsHolder}.
 *
 * NOTE(review): most fields are static, so they are shared across all option
 * instances — confirm this global-state design is intentional.
 */
public class StreamAVOption {
    public static int cameraIndex = StreamConfig.AVOptionsHolder.DEFAULT_CAMERA_INDEX;  // front/back camera selector
    public static int previewWidth = StreamConfig.AVOptionsHolder.DEFAULT_PREVIEW_WIDTH;  // preview width
    public static int previewHeight = StreamConfig.AVOptionsHolder.DEFAULT_PREVIEW_HEIGHT;  // preview height
    public static int videoWidth = StreamConfig.AVOptionsHolder.DEFAULT_VIDEO_WIDTH;  // pushed-stream video width
    public static int videoHeight = StreamConfig.AVOptionsHolder.DEFAULT_VIDEO_HEIGHT;  // pushed-stream video height
    public static int videoBitrate = StreamConfig.AVOptionsHolder.DEFAULT_VIDEO_BITRATE;  // video bitrate
    public static int videoFramerate = StreamConfig.AVOptionsHolder.DEFAULT_VIDEO_FPS;  // frame rate
    public static int videoGOP = StreamConfig.AVOptionsHolder.DEFAULT_VIDEO_GOP;  // GOP / keyframe interval
    public String streamUrl = "";  // RTMP publish URL (per-instance)
    public static int recordVideoWidth = 1920;  // local-recording video width
    public static int recordVideoHeight = 1080;  // local-recording video height
}

View File

@@ -0,0 +1,74 @@
package me.lake.librestreaming.ws;
import android.content.Context;
import android.content.res.Configuration;
import android.hardware.Camera;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.Size;
import static me.lake.librestreaming.ws.StreamConfig.AVOptionsHolder.DEFAULT_FILTER_MODE;
import static me.lake.librestreaming.ws.StreamConfig.AVOptionsHolder.DEFAULT_RENDER_MODE;
/**
* Created by WangShuo on 2017/6/11.
*/
/**
 * Builds a {@link RESConfig} from a {@link StreamAVOption}, including
 * orientation/rotation flags derived from the camera mounting angles and the
 * current screen orientation.
 */
public class StreamConfig {
    /** Default values for all stream options. */
    public static class AVOptionsHolder {
        public static final int DEFAULT_CAMERA_INDEX = Camera.CameraInfo.CAMERA_FACING_BACK;
        public static final int DEFAULT_FILTER_MODE = RESConfig.FilterMode.HARD;
        public static final int DEFAULT_RENDER_MODE = RESConfig.RenderingMode.NativeWindow;
        public static final int DEFAULT_PREVIEW_WIDTH = 1920;
        public static final int DEFAULT_PREVIEW_HEIGHT = 1080;
        public static final int DEFAULT_VIDEO_WIDTH = 1920;
        public static final int DEFAULT_VIDEO_HEIGHT = 1080;
        public static final int DEFAULT_VIDEO_BITRATE =20*1024*1024;  // 20 Mbit/s
        public static final int DEFAULT_VIDEO_FPS = 30;
        public static final int DEFAULT_VIDEO_GOP = 1;  // keyframe every second (GOP in seconds — TODO confirm unit)
    }

    /**
     * @param context used to detect the current screen orientation
     * @param option  source of sizes/bitrate/fps/url
     * @return a fully-populated RESConfig
     */
    public static RESConfig build(Context context, StreamAVOption option) {
        RESConfig res = RESConfig.obtain();
        res.setFilterMode(DEFAULT_FILTER_MODE);
        res.setRenderingMode(DEFAULT_RENDER_MODE);
        res.setTargetPreviewSize(new Size(option.previewWidth,option.previewHeight));
        res.setTargetVideoSize(new Size(option.videoWidth, option.videoHeight));
        res.setBitRate(option.videoBitrate);
        res.setVideoFPS(option.videoFramerate);
        res.setVideoGOP(option.videoGOP);
        res.setDefaultCamera(option.cameraIndex);
        res.setRtmpAddr(option.streamUrl);
        // Query both sensors' physical mounting orientations.
        int frontDirection, backDirection;
        Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
        Camera.getCameraInfo(Camera.CameraInfo.CAMERA_FACING_FRONT, cameraInfo);
        frontDirection = cameraInfo.orientation;
        Camera.getCameraInfo(Camera.CameraInfo.CAMERA_FACING_BACK, cameraInfo);
        backDirection = cameraInfo.orientation;
        if (context.getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT) {
            System.out.println("竖屏");  // portrait
            res.setFrontCameraDirectionMode((frontDirection == 90 ? RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_270 : RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_90) | RESConfig.DirectionMode.FLAG_DIRECTION_FLIP_HORIZONTAL);
            // NOTE(review): both branches of this ternary are ROATATION_90 — the
            // back-camera condition has no effect; likely one branch was meant to
            // be ROATATION_270. Confirm against devices with 270° back sensors.
            res.setBackCameraDirectionMode((backDirection == 90 ? RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_90 : RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_90) );
        } else {
            System.out.println("横屏");  // landscape
            res.setBackCameraDirectionMode((backDirection == 90 ? RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_0 : RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_180));
            res.setFrontCameraDirectionMode((frontDirection == 90 ? RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_180 : RESConfig.DirectionMode.FLAG_DIRECTION_ROATATION_0) | RESConfig.DirectionMode.FLAG_DIRECTION_FLIP_HORIZONTAL);
        }
        return res;
    }
}

View File

@@ -0,0 +1,413 @@
package me.lake.librestreaming.ws;
import android.app.Activity;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Gravity;
import android.view.TextureView;
import android.widget.FrameLayout;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import me.lake.librestreaming.client.RESClient;
import me.lake.librestreaming.core.listener.RESConnectionListener;
import me.lake.librestreaming.core.listener.RESScreenShotListener;
import me.lake.librestreaming.core.listener.RESVideoChangeListener;
import me.lake.librestreaming.encoder.MediaAudioEncoder;
import me.lake.librestreaming.encoder.MediaEncoder;
import me.lake.librestreaming.encoder.MediaMuxerWrapper;
import me.lake.librestreaming.encoder.MediaVideoEncoder;
import me.lake.librestreaming.filter.hardvideofilter.BaseHardVideoFilter;
import me.lake.librestreaming.model.RESConfig;
import me.lake.librestreaming.model.Size;
import me.lake.librestreaming.tools.CameraUtil;
import me.lake.librestreaming.ws.filter.audiofilter.SetVolumeAudioFilter;
/**
* Created by WangShuo on 2017/6/11.
*/
public class StreamLiveCameraView extends FrameLayout {
private static final String TAG = "StreamLiveCameraView";
private Context mContext;
private Activity activity;
private AspectTextureView textureView;
private final List<RESConnectionListener> outerStreamStateListeners = new ArrayList<>();
private static RESClient resClient;
private static RESConfig resConfig;
private static int quality_value_min = 400 * 1024;
private static int quality_value_max = 700 * 1024;
public StreamLiveCameraView(Context context) {
super(context);
this.mContext=context;
}
public StreamLiveCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
this.mContext=context;
}
/**
 * Lazily creates the process-wide RESClient singleton.
 * NOTE(review): the client is static, so it is shared by every
 * StreamLiveCameraView instance — confirm single-view usage.
 */
public static synchronized RESClient getRESClient() {
    if (resClient == null) {
        resClient = new RESClient();
    }
    return resClient;
}
public void setActivity(Activity activity) {
this.activity = activity;
}
/**
* 根据AVOption初始化&打开预览
* @param avOption
*/
/**
 * Initializes the streaming client from {@code avOption} and opens the preview.
 * Must be called before startStreaming()/startRecord().
 *
 * @param context  used to build the RESConfig (orientation detection)
 * @param avOption stream options; must not be null
 * @throws IllegalArgumentException when {@code avOption} is null
 */
public void init(Context context , StreamAVOption avOption) {
    if (avOption == null) {
        throw new IllegalArgumentException("AVOption is null.");
    }
    // May shrink the video size when the front camera lacks video-size support.
    compatibleSize(avOption);
    resClient = getRESClient();
    setContext(mContext);
    resConfig = StreamConfig.build(context,avOption);
    boolean isSucceed = resClient.prepare(resConfig);
    if (!isSucceed) {
        // prepare failed — leave the view uninitialized rather than crash.
        Log.w(TAG, "推流prepare方法返回false, 状态异常.");
        return;
    }
    initPreviewTextureView();
    addListenerAndFilter();
}
/**
 * Compatibility fallback: when the front camera reports no supported video
 * sizes, replaces the requested video size with the best available camera
 * size (width threshold 800), or 720x480 when nothing suitable exists.
 */
private void compatibleSize(StreamAVOption avOptions) {
    // NOTE(review): Integer.parseInt("800") is just the constant 800.
    Camera.Size cameraSize = CameraUtil.getInstance().getBestSize(CameraUtil.getFrontCameraSize(),Integer.parseInt("800"));
    if(!CameraUtil.hasSupportedFrontVideoSizes){
        if(null == cameraSize || cameraSize.width <= 0){
            avOptions.videoWidth = 720;
            avOptions.videoHeight = 480;
        }else{
            avOptions.videoWidth = cameraSize.width;
            avOptions.videoHeight = cameraSize.height;
        }
    }
}
/**
 * Creates the aspect-preserving preview TextureView (once), attaches it as
 * the sole child, and wires the surface callbacks that start/stop the preview.
 */
private void initPreviewTextureView() {
    if (textureView == null && resClient != null) {
        textureView = new AspectTextureView(getContext());
        LayoutParams params = new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
        params.gravity = Gravity.CENTER;
        this.removeAllViews();
        this.addView(textureView);
        // Keep the screen awake while previewing/streaming.
        textureView.setKeepScreenOn(true);
        textureView.setSurfaceTextureListener(surfaceTextureListenerImpl);
        Size s = resClient.getVideoSize();
        textureView.setAspectRatio(AspectTextureView.MODE_OUTSIDE, ((double) s.getWidth() / s.getHeight()));
    }
}
private void addListenerAndFilter() {
if (resClient != null) {
resClient.setConnectionListener(ConnectionListener);
resClient.setVideoChangeListener(VideoChangeListener);
resClient.setSoftAudioFilter(new SetVolumeAudioFilter());
}
}
/**
* 是否推流
*/
public boolean isStreaming(){
if(resClient != null){
return resClient.isStreaming;
}
return false;
}
/**
* 开始推流
*/
public void startStreaming(String rtmpUrl){
if(resClient != null){
resClient.startStreaming(rtmpUrl);
}
}
/**
* 停止推流
*/
public void stopStreaming(){
if(resClient != null){
resClient.stopStreaming();
}
}
/**
* 开始录制
*/
private MediaMuxerWrapper mMuxer;
private boolean isRecord = false;
/**
 * Starts local MP4 recording alongside the live stream: wires video and audio
 * encoders into a new muxer and begins capturing. On IOException the recording
 * flag is cleared and the error is only printed.
 */
public void startRecord(){
    if(resClient != null){
        // Force an EGL context reset so the recorder's encoder surface attaches cleanly.
        resClient.setNeedResetEglContext(true);
        try {
            mMuxer = new MediaMuxerWrapper(".mp4");  // if you record audio only, ".m4a" is also OK.
            // Encoders register themselves with the muxer in their constructors.
            new MediaVideoEncoder(mMuxer, mMediaEncoderListener, StreamAVOption.recordVideoWidth, StreamAVOption.recordVideoHeight);
            new MediaAudioEncoder(mMuxer, mMediaEncoderListener);
            mMuxer.prepare();
            mMuxer.startRecording();
            isRecord = true;
        } catch (IOException e) {
            isRecord = false;
            e.printStackTrace();
        }
    }
}
/**
* 停止录制
*/
/**
 * Stops local recording and releases the muxer.
 *
 * @return the recorded file's path, or null when no recording was in progress
 */
public String stopRecord() {
    isRecord = false;
    if (mMuxer != null) {
        String path = mMuxer.getFilePath();
        mMuxer.stopRecording();
        mMuxer = null;
        // NOTE(review): explicit System.gc() is a hint only — presumably here to
        // release encoder/native buffers promptly; confirm it is still needed.
        System.gc();
        return path;
    }
    System.gc();
    return null;
}
/**
* 是否在录制
*/
public boolean isRecord() {
return isRecord;
}
/**
* 切换摄像头
*/
public void swapCamera(){
if(resClient != null){
resClient.swapCamera();
}
}
/**
* 摄像头焦距 [0.0f,1.0f]
*/
public void setZoomByPercent(float targetPercent){
if(resClient != null){
resClient.setZoomByPercent(targetPercent);
}
}
/**
*摄像头开关闪光灯
*/
public void toggleFlashLight(){
if(resClient != null){
resClient.toggleFlashLight();
}
}
/**
* 推流过程中,重新设置帧率
*/
public void reSetVideoFPS(int fps){
if(resClient != null){
resClient.reSetVideoFPS(fps);
}
}
/**
* 推流过程中,重新设置码率
*/
public void reSetVideoBitrate(int bitrate){
if(resClient != null){
resClient.reSetVideoBitrate(bitrate);
}
}
/**
* 截图
*/
public void takeScreenShot(RESScreenShotListener listener){
if(resClient != null){
resClient.takeScreenShot(listener);
}
}
/**
* 镜像
* @param isEnableMirror 是否启用镜像功能 总开关
* @param isEnablePreviewMirror 是否开启预览镜像
* @param isEnableStreamMirror 是否开启推流镜像
*/
public void setMirror(boolean isEnableMirror,boolean isEnablePreviewMirror,boolean isEnableStreamMirror) {
if(resClient != null) {
resClient.setMirror(isEnableMirror, isEnablePreviewMirror, isEnableStreamMirror);
}
}
/**
* 设置滤镜
*/
public void setHardVideoFilter(BaseHardVideoFilter baseHardVideoFilter){
if(resClient != null){
resClient.setHardVideoFilter(baseHardVideoFilter);
}
}
/**
* 获取BufferFreePercent
*/
public float getSendBufferFreePercent() {
return resClient.getSendBufferFreePercent();
}
/**
* AVSpeed 推流速度 和网络相关
*/
public int getAVSpeed() {
return resClient.getAVSpeed();
}
/**
* 设置上下文
*/
public void setContext(Context context){
if(resClient != null){
resClient.setContext(context);
}
}
/**
* destroy
*/
/**
 * Full teardown: detaches listeners, stops any active stream and recording,
 * then destroys the underlying RESClient. Safe to call when never initialized.
 */
public void destroy(){
    if (resClient != null) {
        resClient.setConnectionListener(null);
        resClient.setVideoChangeListener(null);
        if(resClient.isStreaming){
            resClient.stopStreaming();
        }
        if(isRecord()){
            stopRecord();
        }
        resClient.destroy();
    }
}
/**
* 添加推流状态监听
* @param listener
*/
public void addStreamStateListener(RESConnectionListener listener) {
if (listener != null && !outerStreamStateListeners.contains(listener)) {
outerStreamStateListeners.add(listener);
}
}
/**
 * Forwards the "cream AR" toggle to the streaming client.
 * Fix: adds the null guard used by every other delegating method in this
 * class; the original threw NullPointerException when called before the
 * client was created.
 */
public void setCreamAr(){
    if (resClient != null) {
        resClient.setCreamAr();
    }
}
/**
 * Internal connection listener: reacts to connection results and fans the
 * callbacks out to every externally registered listener.
 */
RESConnectionListener ConnectionListener =new RESConnectionListener() {
    @Override
    public void onOpenConnectionResult(int result) {
        // NOTE(review): result == 1 appears to indicate a failed open,
        // since streaming is stopped in that case — confirm against the
        // RESConnectionListener contract.
        if(result == 1){
            resClient.stopStreaming();
        }
        for (RESConnectionListener listener: outerStreamStateListeners) {
            listener.onOpenConnectionResult(result);
        }
    }
    @Override
    public void onWriteError(int errno) {
        // Forward write errors (e.g. broken network) to outside listeners.
        for (RESConnectionListener listener: outerStreamStateListeners) {
            listener.onWriteError(errno);
        }
    }
    @Override
    public void onCloseConnectionResult(int result) {
        for (RESConnectionListener listener: outerStreamStateListeners) {
            listener.onCloseConnectionResult(result);
        }
    }
};
/**
 * Keeps the preview view's aspect ratio in sync with the video size
 * reported by the client.
 */
RESVideoChangeListener VideoChangeListener = new RESVideoChangeListener() {
    @Override
    public void onVideoSizeChanged(int width, int height) {
        if(textureView != null) {
            textureView.setAspectRatio(AspectTextureView.MODE_INSIDE, ((double) width) / height);
        }
    }
};
/**
 * Drives the camera preview from the TextureView lifecycle: start the
 * preview when the surface appears, resize it on change, and stop it when
 * the surface is destroyed.
 */
TextureView.SurfaceTextureListener surfaceTextureListenerImpl = new TextureView.SurfaceTextureListener() {
    @Override
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        if (resClient != null) {
            resClient.startPreview(surface, width, height);
            // Align the camera output rotation with the current display.
            CameraUtil.setCameraDisplayOrientation(activity,resClient.getCameraId(),resClient.getCamera());
        }
    }
    @Override
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
        if (resClient != null) {
            resClient.updatePreview(width, height);
        }
    }
    @Override
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        if (resClient != null) {
            resClient.stopPreview(true);
        }
        // Returning false: we keep responsibility for releasing the surface.
        return false;
    }
    @Override
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
    }
};
/**
 * Callback methods from the media encoders: hands the video encoder to the
 * streaming client while it is active and detaches it when it stops.
 * Only MediaVideoEncoder instances are of interest here; audio encoders
 * are ignored.
 */
MediaEncoder.MediaEncoderListener mMediaEncoderListener = new MediaEncoder.MediaEncoderListener() {
    @Override
    public void onPrepared(final MediaEncoder encoder) {
        if (encoder instanceof MediaVideoEncoder && resClient != null)
            resClient.setVideoEncoder((MediaVideoEncoder) encoder);
    }
    @Override
    public void onStopped(final MediaEncoder encoder) {
        if (encoder instanceof MediaVideoEncoder && resClient != null)
            resClient.setVideoEncoder(null);
    }
};
}

View File

@@ -0,0 +1,65 @@
package me.lake.librestreaming.ws.filter.audiofilter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import me.lake.librestreaming.filter.softaudiofilter.BaseSoftAudioFilter;
/**
 * Audio filter that mixes a raw 16-bit little-endian PCM file ("background
 * music") into the microphone stream.
 *
 * Fixes over the original:
 * - If the file cannot be opened, {@code fis} stayed null and every later
 *   use threw NullPointerException; the filter now degrades to a pass-through.
 * - The {@code fis.mark(...)} call was removed: FileInputStream does not
 *   support mark/reset, so it was a misleading no-op.
 * - On a read error the original fell through and mixed a stale buffer;
 *   it now returns false (keep the original audio).
 */
public class PcmBgmAudioFilter extends BaseSoftAudioFilter {
    // Background-music stream; null when the file could not be opened.
    FileInputStream fis;
    // Path of the raw PCM file supplied at construction time.
    String filePath;
    // Reusable frame-sized buffer holding the next chunk of BGM samples.
    byte[] bgm;

    public PcmBgmAudioFilter(String filepath) {
        filePath = filepath;
    }

    @Override
    public void onInit(int size) {
        super.onInit(size);
        try {
            fis = new FileInputStream(new File(filePath));
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
        bgm = new byte[SIZE];
    }

    /**
     * Mixes one frame of BGM into the microphone samples.
     * Returns false (caller keeps orignBuff) when no BGM is available,
     * true when targetBuff holds the mixed frame.
     */
    @Override
    public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
        if (fis == null) {
            // BGM file was never opened: keep the original audio.
            return false;
        }
        try {
            if (fis.read(bgm, 0, SIZE) < SIZE) {
                // BGM exhausted (or short read): stop mixing.
                return false;
            }
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
        for (int i = 0; i < SIZE; i += 2) {
            // Decode little-endian 16-bit samples from both streams.
            short origin = (short) (((orignBuff[i + 1] << 8) | orignBuff[i] & 0xff));
            short bg = (short) (((bgm[i + 1] << 8) | bgm[i] & 0xff));
            // Attenuate the BGM and boost the voice before summing.
            // NOTE(review): origin * 4 and the sum can overflow short and
            // wrap; kept as-is to preserve the original sound, but clamping
            // would avoid crackle.
            bg /= 32;
            origin *=4;
            short res = (short) (origin + bg);
            targetBuff[i + 1] = (byte) (res >> 8);
            targetBuff[i] = (byte) (res);
        }
        return true;
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        if (fis != null) {
            try {
                fis.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}

View File

@@ -0,0 +1,29 @@
package me.lake.librestreaming.ws.filter.audiofilter;
import me.lake.librestreaming.filter.softaudiofilter.BaseSoftAudioFilter;
/**
 * Audio filter that applies a linear gain to 16-bit little-endian PCM
 * samples in place.
 *
 * Fix: the original cast the scaled value straight back to short, so any
 * sample pushed past the 16-bit range wrapped around and produced loud
 * crackling for scales > 1.0. Values are now clamped to the short range.
 */
public class SetVolumeAudioFilter extends BaseSoftAudioFilter {
    // Linear gain; 1.0 = unity.
    private float volumeScale=1.0f;

    public SetVolumeAudioFilter() {
    }

    /**
     * @param scale linear volume multiplier, 0.0~ (1.0 leaves audio unchanged)
     */
    public void setVolumeScale(float scale) {
        volumeScale = scale;
    }

    /**
     * Scales every sample of the frame in place in {@code orignBuff}.
     * Returns false so the caller keeps using orignBuff (targetBuff is
     * untouched).
     */
    @Override
    public boolean onFrame(byte[] orignBuff, byte[] targetBuff, long presentationTimeMs, int sequenceNum) {
        for (int i = 0; i < SIZE; i += 2) {
            // Decode a little-endian 16-bit sample.
            short origin = (short) (((orignBuff[i + 1] << 8) | orignBuff[i] & 0xff));
            int scaled = (int) (origin * volumeScale);
            // Clamp instead of letting the short cast wrap around.
            if (scaled > Short.MAX_VALUE) {
                scaled = Short.MAX_VALUE;
            } else if (scaled < Short.MIN_VALUE) {
                scaled = Short.MIN_VALUE;
            }
            orignBuff[i + 1] = (byte) (scaled >> 8);
            orignBuff[i] = (byte) (scaled);
        }
        return false;
    }
}

View File

@@ -0,0 +1,43 @@
package me.lake.librestreaming.ws.filter.hardfilter;
import me.lake.librestreaming.filter.hardvideofilter.OriginalHardVideoFilter;
public class FishEyeFilterHard extends OriginalHardVideoFilter {
private static String FRAGMENTSHADER = "" +
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" +
"uniform sampler2D uCamTexture;\n" +
"varying mediump vec2 vCamTextureCoord;\n" +
"const mediump float PI = 3.1415926535;\n" +
"const mediump float aperture = 180.0;\n" +
"const mediump float apertureHalf = 0.5 * aperture * (PI / 180.0);\n" +
"const mediump float maxFactor = sin(apertureHalf);\n" +
"void main(){\n" +
" vec2 pos = 2.0 * vCamTextureCoord.st - 1.0;\n" +
" float l = length(pos);\n" +
" if (l > 1.0) {\n" +
" gl_FragColor = vec4(0.0,0.0,0.0,1);\n" +
" }\n" +
" else {\n" +
" float x = maxFactor * pos.x;\n" +
" float y = maxFactor * pos.y;\n" +
" float n = length(vec2(x, y));\n" +
" float z = sqrt(1.0 - n * n);\n" +
" float r = atan(n, z) / PI;\n" +
" float phi = atan(y, x);\n" +
" float u = r * cos(phi) + 0.5;\n" +
" float v = r * sin(phi) + 0.5;\n" +
" gl_FragColor = texture2D(uCamTexture,vec2(u,v));\n" +
" }\n" +
"}";
/** Builds the filter with the default vertex shader and the fish-eye fragment shader. */
public FishEyeFilterHard() {
    super(null, FRAGMENTSHADER);
}
// NOTE(review): this override only delegates to super and could be removed.
@Override
public void onInit(int VWidth, int VHeight) {
    super.onInit(VWidth, VHeight);
}
}

View File

@@ -0,0 +1,190 @@
package me.lake.librestreaming.ws.filter.hardfilter;
import android.opengl.GLES20;
import jp.co.cyberagent.android.gpuimage.GPUImageFilter;
/**
* @author WangShuo
*/
public class GPUImageBeautyFilter extends GPUImageFilter {
public static final String BILATERAL_FRAGMENT_SHADER = "" +
"precision highp float;\n"+
" varying highp vec2 textureCoordinate;\n" +
"\n" +
" uniform sampler2D inputImageTexture;\n" +
"\n" +
" uniform highp vec2 singleStepOffset;\n" +
" uniform highp vec4 params;\n" +
" uniform highp float brightness;\n" +
"\n" +
" const highp vec3 W = vec3(0.299, 0.587, 0.114);\n" +
" const highp mat3 saturateMatrix = mat3(\n" +
" 1.1102, -0.0598, -0.061,\n" +
" -0.0774, 1.0826, -0.1186,\n" +
" -0.0228, -0.0228, 1.1772);\n" +
" highp vec2 blurCoordinates[24];\n" +
"\n" +
" highp float hardLight(highp float color) {\n" +
" if (color <= 0.5)\n" +
" color = color * color * 2.0;\n" +
" else\n" +
" color = 1.0 - ((1.0 - color)*(1.0 - color) * 2.0);\n" +
" return color;\n" +
"}\n" +
"\n" +
" void main(){\n" +
" highp vec3 centralColor = texture2D(inputImageTexture, textureCoordinate).rgb;\n" +
" blurCoordinates[0] = textureCoordinate.xy + singleStepOffset * vec2(0.0, -10.0);\n" +
" blurCoordinates[1] = textureCoordinate.xy + singleStepOffset * vec2(0.0, 10.0);\n" +
" blurCoordinates[2] = textureCoordinate.xy + singleStepOffset * vec2(-10.0, 0.0);\n" +
" blurCoordinates[3] = textureCoordinate.xy + singleStepOffset * vec2(10.0, 0.0);\n" +
" blurCoordinates[4] = textureCoordinate.xy + singleStepOffset * vec2(5.0, -8.0);\n" +
" blurCoordinates[5] = textureCoordinate.xy + singleStepOffset * vec2(5.0, 8.0);\n" +
" blurCoordinates[6] = textureCoordinate.xy + singleStepOffset * vec2(-5.0, 8.0);\n" +
" blurCoordinates[7] = textureCoordinate.xy + singleStepOffset * vec2(-5.0, -8.0);\n" +
" blurCoordinates[8] = textureCoordinate.xy + singleStepOffset * vec2(8.0, -5.0);\n" +
" blurCoordinates[9] = textureCoordinate.xy + singleStepOffset * vec2(8.0, 5.0);\n" +
" blurCoordinates[10] = textureCoordinate.xy + singleStepOffset * vec2(-8.0, 5.0);\n" +
" blurCoordinates[11] = textureCoordinate.xy + singleStepOffset * vec2(-8.0, -5.0);\n" +
" blurCoordinates[12] = textureCoordinate.xy + singleStepOffset * vec2(0.0, -6.0);\n" +
" blurCoordinates[13] = textureCoordinate.xy + singleStepOffset * vec2(0.0, 6.0);\n" +
" blurCoordinates[14] = textureCoordinate.xy + singleStepOffset * vec2(6.0, 0.0);\n" +
" blurCoordinates[15] = textureCoordinate.xy + singleStepOffset * vec2(-6.0, 0.0);\n" +
" blurCoordinates[16] = textureCoordinate.xy + singleStepOffset * vec2(-4.0, -4.0);\n" +
" blurCoordinates[17] = textureCoordinate.xy + singleStepOffset * vec2(-4.0, 4.0);\n" +
" blurCoordinates[18] = textureCoordinate.xy + singleStepOffset * vec2(4.0, -4.0);\n" +
" blurCoordinates[19] = textureCoordinate.xy + singleStepOffset * vec2(4.0, 4.0);\n" +
" blurCoordinates[20] = textureCoordinate.xy + singleStepOffset * vec2(-2.0, -2.0);\n" +
" blurCoordinates[21] = textureCoordinate.xy + singleStepOffset * vec2(-2.0, 2.0);\n" +
" blurCoordinates[22] = textureCoordinate.xy + singleStepOffset * vec2(2.0, -2.0);\n" +
" blurCoordinates[23] = textureCoordinate.xy + singleStepOffset * vec2(2.0, 2.0);\n" +
"\n" +
" highp float sampleColor = centralColor.g * 22.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[0]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[1]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[2]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[3]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[4]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[5]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[6]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[7]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[8]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[9]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[10]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[11]).g;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[12]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[13]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[14]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[15]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[16]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[17]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[18]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[19]).g * 2.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[20]).g * 3.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[21]).g * 3.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[22]).g * 3.0;\n" +
" sampleColor += texture2D(inputImageTexture, blurCoordinates[23]).g * 3.0;\n" +
"\n" +
" sampleColor = sampleColor / 62.0;\n" +
"\n" +
" highp float highPass = centralColor.g - sampleColor + 0.5;\n" +
"\n" +
" for (int i = 0; i < 5; i++) {\n" +
" highPass = hardLight(highPass);\n" +
" }\n" +
" highp float lumance = dot(centralColor, W);\n" +
"\n" +
" highp float alpha = pow(lumance, params.r);\n" +
"\n" +
" highp vec3 smoothColor = centralColor + (centralColor-vec3(highPass))*alpha*0.1;\n" +
"\n" +
" smoothColor.r = clamp(pow(smoothColor.r, params.g), 0.0, 1.0);\n" +
" smoothColor.g = clamp(pow(smoothColor.g, params.g), 0.0, 1.0);\n" +
" smoothColor.b = clamp(pow(smoothColor.b, params.g), 0.0, 1.0);\n" +
"\n" +
" highp vec3 lvse = vec3(1.0)-(vec3(1.0)-smoothColor)*(vec3(1.0)-centralColor);\n" +
" highp vec3 bianliang = max(smoothColor, centralColor);\n" +
" highp vec3 rouguang = 2.0*centralColor*smoothColor + centralColor*centralColor - 2.0*centralColor*centralColor*smoothColor;\n" +
"\n" +
" gl_FragColor = vec4(mix(centralColor, lvse, alpha), 1.0);\n" +
" gl_FragColor.rgb = mix(gl_FragColor.rgb, bianliang, alpha);\n" +
" gl_FragColor.rgb = mix(gl_FragColor.rgb, rouguang, params.b);\n" +
"\n" +
" highp vec3 satcolor = gl_FragColor.rgb * saturateMatrix;\n" +
" gl_FragColor.rgb = mix(gl_FragColor.rgb, satcolor, params.a);\n" +
" gl_FragColor.rgb = vec3(gl_FragColor.rgb + vec3(brightness));\n" +
"}";
// Tone ("ruddy") level; feeds params.z / params.w in setParams().
private float toneLevel;
// Skin-smoothing strength; feeds params.x / params.y in setParams().
private float beautyLevel;
// Brightness offset uploaded to the "brightness" uniform.
private float brightLevel;
// Cached shader uniform handles, resolved in onInit().
private int paramsLocation;
private int brightnessLocation;
private int singleStepOffsetLocation;
/** Creates the filter with moderate default smoothing/brightness/tone levels. */
public GPUImageBeautyFilter() {
    super(NO_FILTER_VERTEX_SHADER, BILATERAL_FRAGMENT_SHADER);
    toneLevel = -0.5f;
    beautyLevel = 0.8f;
    brightLevel = 0.3f;
}
/** Resolves the uniform handles and pushes the initial parameter values. */
@Override
public void onInit() {
    super.onInit();
    paramsLocation = GLES20.glGetUniformLocation(getProgram(), "params");
    brightnessLocation = GLES20.glGetUniformLocation(getProgram(), "brightness");
    singleStepOffsetLocation = GLES20.glGetUniformLocation(getProgram(), "singleStepOffset");
    setParams(beautyLevel, toneLevel);
    setBrightLevel(brightLevel);
}
/** Skin smoothing ("beauty") level; re-uploads the params uniform. */
public void setBeautyLevel(float beautyLevel) {
    this.beautyLevel = beautyLevel;
    setParams(beautyLevel, toneLevel);
}
/** Whitening/brightness level, remapped to a [-0.3, +0.3] uniform offset. */
public void setBrightLevel(float brightLevel) {
    this.brightLevel = brightLevel;
    setFloat(brightnessLocation, 0.6f * (-0.5f + brightLevel));
}
/** Tone ("ruddy") level; re-uploads the params uniform. */
public void setToneLevel(float toneLevel) {
    this.toneLevel = toneLevel;
    setParams(beautyLevel, toneLevel);
}
/** Convenience setter updating smoothing, brightness, and tone at once. */
public void setAllBeautyParams(float beauty,float bright,float tone) {
    setBeautyLevel(beauty);
    setBrightLevel(bright);
    setToneLevel(tone);
}
/**
 * Uploads the 4-component parameter vector to the shader:
 * x/y derive from the smoothing strength, z/w from the tone level.
 */
public void setParams(float beauty, float tone) {
    float[] vector = {
            1.0f - 0.6f * beauty,
            1.0f - 0.3f * beauty,
            0.1f + 0.3f * tone,
            0.1f + 0.3f * tone,
    };
    setFloatVec4(paramsLocation, vector);
}
/** Uploads the sampling step as two texels in normalized coordinates. */
private void setTexelSize(final float w, final float h) {
    setFloatVec2(singleStepOffsetLocation, new float[] {2.0f / w, 2.0f / h});
}
/** Recomputes the texel step whenever the output size changes. */
@Override
public void onOutputSizeChanged(final int width, final int height) {
    super.onOutputSizeChanged(width, height);
    setTexelSize(width, height);
}
}

View File

@@ -0,0 +1,176 @@
package me.lake.librestreaming.ws.filter.hardfilter;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.os.Build;
import java.nio.FloatBuffer;
import me.lake.librestreaming.filter.hardvideofilter.BaseHardVideoFilter;
import me.lake.librestreaming.tools.GLESTools;
public class WatermarkFilter extends BaseHardVideoFilter {
protected int glProgram;
protected int glCamTextureLoc;
protected int glCamPostionLoc;
protected int glCamTextureCoordLoc;
protected int glImageTextureLoc;
protected int glImageRectLoc;
protected String vertexShader_filter = "" +
"attribute vec4 aCamPosition;\n" +
"attribute vec2 aCamTextureCoord;\n" +
"varying vec2 vCamTextureCoord;\n" +
"void main(){\n" +
" gl_Position= aCamPosition;\n" +
" vCamTextureCoord = aCamTextureCoord;\n" +
"}";
protected String fragmentshader_filter = "" +
"precision mediump float;\n" +
"varying mediump vec2 vCamTextureCoord;\n" +
"uniform sampler2D uCamTexture;\n" +
"uniform sampler2D uImageTexture;\n" +
"uniform vec4 imageRect;\n" +
"void main(){\n" +
" lowp vec4 c1 = texture2D(uCamTexture, vCamTextureCoord);\n" +
" lowp vec2 vCamTextureCoord2 = vec2(vCamTextureCoord.x,1.0-vCamTextureCoord.y);\n" +
" if(vCamTextureCoord2.x>imageRect.r && vCamTextureCoord2.x<imageRect.b && vCamTextureCoord2.y>imageRect.g && vCamTextureCoord2.y<imageRect.a)\n" +
" {\n" +
" vec2 imagexy = vec2((vCamTextureCoord2.x-imageRect.r)/(imageRect.b-imageRect.r),(vCamTextureCoord2.y-imageRect.g)/(imageRect.a-imageRect.g));\n" +
" lowp vec4 c2 = texture2D(uImageTexture, imagexy);\n" +
" lowp vec4 outputColor = c2+c1*c1.a*(1.0-c2.a);\n" +
" outputColor.a = 1.0;\n" +
" gl_FragColor = outputColor;\n" +
" }else\n" +
" {\n" +
" gl_FragColor = c1;\n" +
" }\n" +
"}";
protected int imageTexture = GLESTools.NO_TEXTURE;
protected final Object syncBitmap = new Object();
protected Bitmap iconBitmap;
protected boolean needUpdate;
protected RectF iconRectF;
protected Rect iconRect;
/**
 * Creates a watermark filter drawing {@code _bitmap} inside {@code _rect}
 * (video pixel coordinates).
 */
public WatermarkFilter(Bitmap _bitmap, Rect _rect) {
    iconBitmap = _bitmap;
    needUpdate = true;
    iconRectF = new RectF();
    iconRect = _rect;
}
/** Subclass constructor: starts with no watermark bitmap. */
protected WatermarkFilter() {
    iconBitmap = null;
    needUpdate = false;
    iconRectF = new RectF(0,0,0,0);
}
/**
 * Replaces the watermark bitmap and/or its placement rectangle.
 * Synchronized against the GL draw pass via {@code syncBitmap}.
 *
 * @param _bitmap new watermark image, or null to keep the current one
 * @param _rect   new placement rectangle, or null to keep the current one
 */
public void updateIcon(Bitmap _bitmap, Rect _rect) {
    synchronized (syncBitmap) {
        if (_bitmap != null) {
            iconBitmap = _bitmap;
            needUpdate = true;
        }
        if (_rect != null) {
            iconRect = _rect;
        }
    }
}
/**
 * Renders the given text to a bitmap and uses it as the watermark,
 * keeping the current placement rectangle.
 */
public void updateText(float textSize,String text){
    updateIcon(fromText(textSize, text),iconRect);
}
/**
 * Renders {@code text} (padded with one space on each side) into a new
 * ARGB_8888 bitmap: white text on a translucent white backdrop.
 *
 * Fix: removed the original's trailing {@code canvas.save()} — it had no
 * matching {@code restore()} and no effect on the produced bitmap.
 *
 * @param textSize text size in pixels
 * @param text     the text to render
 * @return a bitmap sized to fit the rendered text
 */
public static Bitmap fromText(float textSize, String text) {
    text = " " + text + " ";
    Paint paint = new Paint();
    paint.setTextSize(textSize);
    paint.setTextAlign(Paint.Align.LEFT);
    paint.setColor(Color.WHITE);
    Paint.FontMetricsInt fm = paint.getFontMetricsInt();
    int width = (int) paint.measureText(text);
    int height = fm.descent - fm.ascent;
    Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    Canvas canvas = new Canvas(bitmap);
    // Translucent white backdrop keeps the watermark readable on any video.
    canvas.drawARGB(80, 255, 255, 255);
    // Baseline placed so the full ascent fits inside the bitmap.
    canvas.drawText(text, 0, fm.leading - fm.ascent, paint);
    return bitmap;
}
/**
 * Compiles the watermark shader program and caches attribute/uniform
 * handles. Runs on the GL thread once the video size is known.
 */
@Override
public void onInit(int VWidth, int VHeight) {
    super.onInit(VWidth, VHeight);
    glProgram = GLESTools.createProgram(vertexShader_filter, fragmentshader_filter);
    GLES20.glUseProgram(glProgram);
    glCamTextureLoc = GLES20.glGetUniformLocation(glProgram, "uCamTexture");
    glImageTextureLoc = GLES20.glGetUniformLocation(glProgram, "uImageTexture");
    glCamPostionLoc = GLES20.glGetAttribLocation(glProgram, "aCamPosition");
    glCamTextureCoordLoc = GLES20.glGetAttribLocation(glProgram, "aCamTextureCoord");
    glImageRectLoc = GLES20.glGetUniformLocation(glProgram, "imageRect");
}
/**
 * Composites the camera frame and the watermark into targetFrameBuffer.
 *
 * Fix: {@code needUpdate} is now cleared after the bitmap has been uploaded.
 * The original never reset it, so the watermark bitmap was deleted and
 * re-uploaded to a fresh GL texture on every single frame.
 */
@Override
public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textrueBuffer) {
    synchronized (syncBitmap) {
        if (needUpdate) {
            if (imageTexture != GLESTools.NO_TEXTURE) {
                GLES20.glDeleteTextures(1, new int[]{imageTexture}, 0);
            }
            imageTexture = GLESTools.loadTexture(iconBitmap, GLESTools.NO_TEXTURE);
            // Upload once per updateIcon() call, not once per frame.
            needUpdate = false;
        }
    }
    // Convert the pixel rectangle into normalized [0,1] texture space.
    iconRectF.top = iconRect.top / (float) SIZE_HEIGHT;
    iconRectF.bottom = iconRect.bottom / (float) SIZE_HEIGHT;
    iconRectF.left = iconRect.left / (float) SIZE_WIDTH;
    iconRectF.right = iconRect.right / (float) SIZE_WIDTH;
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer);
    GLES20.glUseProgram(glProgram);
    GLES20.glUniform4f(glImageRectLoc, iconRectF.left, iconRectF.top, iconRectF.right, iconRectF.bottom);
    // Texture unit 0: camera frame; unit 1: watermark image.
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, cameraTexture);
    GLES20.glUniform1i(glCamTextureLoc, 0);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, imageTexture);
    GLES20.glUniform1i(glImageTextureLoc, 1);
    GLES20.glEnableVertexAttribArray(glCamPostionLoc);
    GLES20.glEnableVertexAttribArray(glCamTextureCoordLoc);
    shapeBuffer.position(0);
    GLES20.glVertexAttribPointer(glCamPostionLoc, 2,
            GLES20.GL_FLOAT, false,
            2 * 4, shapeBuffer);
    textrueBuffer.position(0);
    GLES20.glVertexAttribPointer(glCamTextureCoordLoc, 2,
            GLES20.GL_FLOAT, false,
            2 * 4, textrueBuffer);
    GLES20.glViewport(0, 0, SIZE_WIDTH, SIZE_HEIGHT);
    GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    GLES20.glDrawElements(GLES20.GL_TRIANGLES, drawIndecesBuffer.limit(), GLES20.GL_UNSIGNED_SHORT, drawIndecesBuffer);
    GLES20.glFinish();
    GLES20.glDisableVertexAttribArray(glCamPostionLoc);
    GLES20.glDisableVertexAttribArray(glCamTextureCoordLoc);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
    GLES20.glUseProgram(0);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
/** Releases the GL program and the uploaded watermark texture. */
@Override
public void onDestroy() {
    super.onDestroy();
    GLES20.glDeleteProgram(glProgram);
    GLES20.glDeleteTextures(1, new int[]{imageTexture}, 0);
}
}

View File

@@ -0,0 +1,136 @@
package me.lake.librestreaming.ws.filter.hardfilter.extra;
import android.opengl.GLES20;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import jp.co.cyberagent.android.gpuimage.GPUImageFilter;
import me.lake.librestreaming.filter.hardvideofilter.BaseHardVideoFilter;
import me.lake.librestreaming.model.RESCoreParameters;
import me.lake.librestreaming.tools.GLESTools;
public class GPUImageCompatibleFilter<T extends GPUImageFilter> extends BaseHardVideoFilter {
private T innerGPUImageFilter;
private FloatBuffer innerShapeBuffer;
private FloatBuffer innerTextureBuffer;
/** Wraps a GPUImage filter so it can run inside this library's hard-filter pipeline. */
public GPUImageCompatibleFilter(T filter) {
    innerGPUImageFilter = filter;
}
/** @return the wrapped GPUImage filter instance. */
public T getGPUImageFilter() {
    return innerGPUImageFilter;
}
/** Initializes the wrapped filter and tells it the output size. */
@Override
public void onInit(int VWidth, int VHeight) {
    super.onInit(VWidth, VHeight);
    innerGPUImageFilter.init();
    innerGPUImageFilter.onOutputSizeChanged(VWidth, VHeight);
}
/**
 * Renders through the wrapped GPUImage filter into the target framebuffer,
 * using the GPUImage-style vertex/texture buffers built in onDirectionUpdate.
 */
@Override
public void onDraw(int cameraTexture, int targetFrameBuffer, FloatBuffer shapeBuffer, FloatBuffer textrueBuffer) {
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, targetFrameBuffer);
    innerGPUImageFilter.onDraw(cameraTexture, innerShapeBuffer, innerTextureBuffer);
    GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
}
/** Releases the wrapped filter's GL resources. */
@Override
public void onDestroy() {
    super.onDestroy();
    innerGPUImageFilter.destroy();
}
/**
 * Rebuilds the GPUImage-style vertex/texture buffers when the video
 * direction (rotation/flip flags) changes.
 *
 * Fixes: the original built the texture buffer from the STALE
 * {@code directionFlag} field instead of the new {@code _directionFlag},
 * and never updated the field, so the inequality guard re-triggered on
 * every call while the orientation never reflected the latest flags.
 * NOTE(review): the base class may also assign {@code directionFlag};
 * the extra assignment here is harmless in that case.
 */
@Override
public void onDirectionUpdate(int _directionFlag) {
    if (directionFlag != _directionFlag) {
        innerShapeBuffer = getGPUImageCompatShapeVerticesBuffer();
        innerTextureBuffer = getGPUImageCompatTextureVerticesBuffer(_directionFlag);
        directionFlag = _directionFlag;
    }
}
public static final float TEXTURE_NO_ROTATION[] = {
1.0f, 1.0f,
0.0f, 1.0f,
1.0f, 0.0f,
0.0f, 0.0f,
};
public static final float TEXTURE_ROTATED_90[] = {
0.0f, 1.0f,
0.0f, 0.0f,
1.0f, 1.0f,
1.0f, 0.0f,
};
public static final float TEXTURE_ROTATED_180[] = {
0.0f, 0.0f,
1.0f, 0.0f,
0.0f, 1.0f,
1.0f, 1.0f,
};
public static final float TEXTURE_ROTATED_270[] = {
1.0f, 0.0f,
1.0f, 1.0f,
0.0f, 0.0f,
0.0f, 1.0f,
};
static final float CUBE[] = {
-1.0f, -1.0f,
1.0f, -1.0f,
-1.0f, 1.0f,
1.0f, 1.0f,
};
/** Builds a native-order direct FloatBuffer holding the full-screen quad vertices. */
public static FloatBuffer getGPUImageCompatShapeVerticesBuffer() {
    ByteBuffer raw = ByteBuffer.allocateDirect(GLESTools.FLOAT_SIZE_BYTES * CUBE.length);
    raw.order(ByteOrder.nativeOrder());
    FloatBuffer vertices = raw.asFloatBuffer();
    vertices.put(CUBE);
    vertices.position(0);
    return vertices;
}
/**
 * Builds the texture-coordinate buffer matching the given direction flags:
 * high nibble selects the rotation, low bits apply horizontal/vertical flips.
 */
public static FloatBuffer getGPUImageCompatTextureVerticesBuffer(final int directionFlag) {
    float[] buffer;
    // Rotation is encoded in the high nibble of the flag.
    switch (directionFlag & 0xF0) {
        case RESCoreParameters.FLAG_DIRECTION_ROATATION_90:
            buffer = TEXTURE_ROTATED_90.clone();
            break;
        case RESCoreParameters.FLAG_DIRECTION_ROATATION_180:
            buffer = TEXTURE_ROTATED_180.clone();
            break;
        case RESCoreParameters.FLAG_DIRECTION_ROATATION_270:
            buffer = TEXTURE_ROTATED_270.clone();
            break;
        default:
            buffer = TEXTURE_NO_ROTATION.clone();
    }
    // Even indices are x coordinates: mirror them for a horizontal flip.
    if ((directionFlag & RESCoreParameters.FLAG_DIRECTION_FLIP_HORIZONTAL) != 0) {
        buffer[0] = flip(buffer[0]);
        buffer[2] = flip(buffer[2]);
        buffer[4] = flip(buffer[4]);
        buffer[6] = flip(buffer[6]);
    }
    // Odd indices are y coordinates: mirror them for a vertical flip.
    if ((directionFlag & RESCoreParameters.FLAG_DIRECTION_FLIP_VERTICAL) != 0) {
        buffer[1] = flip(buffer[1]);
        buffer[3] = flip(buffer[3]);
        buffer[5] = flip(buffer[5]);
        buffer[7] = flip(buffer[7]);
    }
    FloatBuffer result = ByteBuffer.allocateDirect(GLESTools.FLOAT_SIZE_BYTES * buffer.length).
            order(ByteOrder.nativeOrder()).
            asFloatBuffer();
    result.put(buffer);
    result.position(0);
    return result;
}
/** Mirrors a normalized texture coordinate: 0.0 becomes 1.0, anything else 0.0. */
private static float flip(final float i) {
    if (i == 0.0f) {
        return 1.0f;
    }
    return 0.0f;
}
}

View File

@@ -0,0 +1 @@
include $(call all-subdir-makefiles)

View File

@@ -0,0 +1,5 @@
# NDK build settings for the native restreaming library.
# Fix: the disabled lines used C-style "//" which is NOT a comment in
# makefiles — they silently defined variables literally named "//APP_STL"
# etc. Makefile comments start with "#".
# APP_STL := gnustl_static
APP_ABI := armeabi-v7a
APP_PLATFORM := android-10
# APP_OPTIM := debug
# NDK_DEBUG = 1

View File

@@ -0,0 +1,16 @@
# Builds the "restreaming" shared library from the JNI color-conversion sources.
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES := restreaming.c \
colorConvert.c
LOCAL_C_INCLUDES :=$(LOCAL_PATH)/
LOCAL_MODULE := restreaming
# Link the Android log, bitmap (jnigraphics) and native-window libraries.
LOCAL_LDLIBS := -llog -ljnigraphics -landroid
include $(BUILD_SHARED_LIBRARY)

View File

@@ -0,0 +1,272 @@
#include "colorConvert.h"
#include <string.h>
#include "log.h"
/*
 * Converts NV21 (Y plane + interleaved VU) to YUV420SP/NV12 (Y plane +
 * interleaved UV) by swapping the chroma byte order.
 * Both buffers hold ySize * 3 / 2 bytes and must not overlap.
 * NOTE(review): dst is declared const but is written through memcpy and a
 * const-discarding pointer; the qualifier is misleading.
 */
void NV21TOYUV420SP(const unsigned char *src,const unsigned char *dst,int ySize)
{
    /* Luma plane is identical in both layouts. */
    memcpy(dst,src,ySize);
    int uvSize = ySize>>1;
    int uSize = uvSize>>1;
    /* Shift the chroma plane left by one byte so each U lands on an even
     * offset; the V samples are then rewritten by the loop below. */
    memcpy(dst+ySize,src+ySize+1,uvSize-1);
    unsigned char *nvcur = src+ySize;
    unsigned char *yuvcur = dst+ySize+1;
    int i=0;
    /* Copy each V sample (even offsets in src chroma) to the odd offsets
     * of the destination chroma plane. */
    while(i<uSize)
    {
        (*yuvcur)=(*nvcur);
        yuvcur+=2;
        nvcur+=2;
        ++i;
    }
}
/*
 * Converts NV21 (Y plane + interleaved VU) to planar YUV420P/I420
 * (Y plane, U plane, V plane).
 * Both buffers hold ySize * 3 / 2 bytes and must not overlap.
 * NOTE(review): dst is declared const but is written to.
 */
void NV21TOYUV420P(const unsigned char *src,const unsigned char *dst,int ySize)
{
    memcpy(dst,src,ySize);
    int uSize = ySize>>2;
    /* NV21 interleaves V first, then U: U samples sit at odd offsets. */
    unsigned char *srcucur = src+ySize+1;
    unsigned char *srcvcur = src+ySize;
    unsigned char *dstucur = dst+ySize;
    unsigned char *dstvcur = dst+ySize+uSize;
    int i=0;
    while(i<uSize)
    {
        (*dstucur)=(*srcucur);
        (*dstvcur)=(*srcvcur);
        srcucur+=2;
        srcvcur+=2;
        ++dstucur;
        ++dstvcur;
        ++i;
    }
}
/*
 * De-interleaves YUV420SP (NV12: Y plane followed by UVUV...) into planar
 * YUV420P/I420 (Y plane, U plane, V plane).  ySize is width * height;
 * both buffers hold ySize * 3 / 2 bytes and must not overlap.
 * (dst keeps the original's const qualifier for interface compatibility.)
 */
void YUV420SPTOYUV420P(const unsigned char *src,const unsigned char *dst,int ySize)
{
    int uSize = ySize >> 2;
    unsigned char *outU = (unsigned char *) (dst + ySize);
    unsigned char *outV = outU + uSize;
    const unsigned char *in = src + ySize;
    int n;

    /* The luma plane is shared verbatim between the two layouts. */
    memcpy((void *) dst, src, ySize);

    /* Split the interleaved chroma pairs into separate planes. */
    for (n = 0; n < uSize; ++n) {
        outU[n] = in[2 * n];
        outV[n] = in[2 * n + 1];
    }
}
/*
 * Converts an NV21 frame to 32-bit 0xAARRGGBB pixels using fixed-point
 * coefficients (1192 ~= 1.164 * 1024; the intermediates are 18-bit).
 * NOTE(review): dst is declared const unsigned int * but is written to.
 */
void NV21TOARGB(const unsigned char *src,const unsigned int *dst,int width,int height)
{
    int frameSize = width * height;
    int i = 0, j = 0,yp = 0;
    int uvp = 0, u = 0, v = 0;
    int y1192 = 0, r = 0, g = 0, b = 0;
    unsigned int *target=dst;
    for (j = 0, yp = 0; j < height; j++)
    {
        /* Each chroma row is shared by two luma rows. */
        uvp = frameSize + (j >> 1) * width;
        u = 0;
        v = 0;
        for (i = 0; i < width; i++, yp++)
        {
            int y = (0xff & ((int) src[yp])) - 16;
            if (y < 0)
                y = 0;
            /* NV21 stores V then U, one pair for every two pixels. */
            if ((i & 1) == 0)
            {
                v = (0xff & src[uvp++]) - 128;
                u = (0xff & src[uvp++]) - 128;
            }
            y1192 = 1192 * y;
            r = (y1192 + 1634 * v);
            g = (y1192 - 833 * v - 400 * u);
            b = (y1192 + 2066 * u);
            /* Clamp the 18-bit intermediates before packing into bytes. */
            if (r < 0) r = 0; else if (r > 262143) r = 262143;
            if (g < 0) g = 0; else if (g > 262143) g = 262143;
            if (b < 0) b = 0; else if (b > 262143) b = 262143;
            target[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
        }
    }
}
/* Convenience tests on the caller-supplied direction bit-mask. */
#define IS_FLIP_H ((FLAG_DIRECTION_FLIP_HORIZONTAL&directionFlag)!=0)
#define IS_FLIP_V ((FLAG_DIRECTION_FLIP_VERTICAL&directionFlag)!=0)
/*
 * Rotates and/or flips an NV21 frame of srcWidth x srcHeight into dst,
 * controlled by the FLAG_DIRECTION_* bits of directionFlag.  90/270 degree
 * rotations are both reduced to a single transpose-like pass (rotate==1)
 * with the flip flags rewritten to compensate.  Buffers hold
 * srcWidth * srcHeight * 3 / 2 bytes and must not overlap.
 * NOTE(review): dst is declared const but is written to, and the
 * "vflip = IS_FLIP_H?0:0;" line is always 0 — possibly a typo; confirm
 * against the 270-degree + vertical-flip case before touching it.
 */
void NV21Transform(const unsigned char *src,const unsigned char *dst,int srcWidth,int srcHeight,int directionFlag)
{
    unsigned char *cdst=dst;
    unsigned char *csrc=src;
    int rotate=0;
    int hflip=0;
    int vflip=0;
    /* 0/180 degrees keep the row direction; 90/270 transpose. */
    if((FLAG_DIRECTION_ROATATION_0&directionFlag)!=0 || (FLAG_DIRECTION_ROATATION_180&directionFlag)!=0){
        rotate =0;
    }else{
        rotate =1;
    }
    /* Fold the 180/270 degree cases into equivalent flip combinations. */
    if((FLAG_DIRECTION_ROATATION_0&directionFlag)!=0 || (FLAG_DIRECTION_ROATATION_90&directionFlag)!=0){
        hflip = IS_FLIP_H?1:0;
        vflip = IS_FLIP_V?1:0;
    }else{
        if(IS_FLIP_V){
            hflip = IS_FLIP_H?0:1;
            vflip = IS_FLIP_H?0:0;
        }else{
            hflip = IS_FLIP_H?0:1;
            vflip = IS_FLIP_H?1:1;
        }
    }
    int ySize=srcHeight*srcWidth;
    int totalSize = ySize*3 / 2;
    int yStart,yStep,xStep;
    /* Identity transform: plain copy. */
    if(rotate==0 && hflip==0 && vflip==0){
        memcpy(cdst,csrc,totalSize);
        return;
    }
    int srcX,srcY,srcCurr;
    int dstX,dstY,dstCurr;
    int halfHeight=srcHeight>>1,halfWidth=srcWidth>>1;
    if(rotate==1){
        /* 90/270-degree path: walk the source row-major and scatter into
         * transposed destination positions.  yStart/yStep/xStep encode the
         * flip combination. */
        //transformY
        if(hflip==1){
            yStart=vflip==1?ySize-srcHeight:ySize-1;
            yStep=vflip==1?1:-1;
            xStep=-srcHeight;
        }else{
            yStart=vflip==1?0:srcHeight-1;
            yStep=vflip==1?1:-1;
            xStep=srcHeight;
        }
        srcCurr=-1;
        for(srcY=0;srcY<srcHeight;++srcY){
            dstCurr = yStart;
            for(srcX=0;srcX<srcWidth;++srcX){
                cdst[dstCurr]=csrc[++srcCurr];
                dstCurr+=xStep;
            }
            yStart+=yStep;
        }
        /* Chroma plane: same walk, but VU pairs move together. */
        //transformVU
        if(hflip==1){
            yStart=vflip==1?totalSize-srcHeight:totalSize-2;
            yStep=vflip==1?2:-2;
            xStep=-srcHeight;
        }else{
            yStart=vflip==1?ySize:ySize+srcHeight-2;
            yStep=vflip==1?2:-2;
            xStep=srcHeight;
        }
        srcCurr=ySize-1;
        for(srcY=0;srcY<halfHeight;++srcY){
            dstCurr = yStart;
            for(srcX=0;srcX<halfWidth;++srcX){
                cdst[dstCurr]=csrc[++srcCurr];
                cdst[dstCurr+1]=csrc[++srcCurr];
                dstCurr+=xStep;
            }
            yStart+=yStep;
        }
    }else{
        if(vflip==1 && hflip==0){
            /* Pure vertical flip: copy rows bottom-up. */
            //transformY
            yStart = ySize-srcWidth;
            srcCurr=-1;
            for(srcY=0;srcY<srcHeight;++srcY){
                dstCurr = yStart-1;
                for(srcX=0;srcX<srcWidth;++srcX){
                    cdst[++dstCurr]=csrc[++srcCurr];
                }
                yStart-=srcWidth;
            }
            //transformVU
            yStart=totalSize-srcWidth;
            for(srcY=0;srcY<halfHeight;++srcY){
                dstCurr = yStart-1;
                for(srcX=0;srcX<halfWidth;++srcX){
                    cdst[++dstCurr]=csrc[++srcCurr];
                    cdst[++dstCurr]=csrc[++srcCurr];
                }
                yStart-=srcWidth;
            }
        }else{
            /* Horizontal flip (optionally combined with vertical):
             * rows are written right-to-left. */
            yStep=vflip==1?-srcWidth:srcWidth;
            yStart=vflip==1?ySize-1:srcWidth-1;
            //transformY
            srcCurr=-1;
            for(srcY=0;srcY<srcHeight;++srcY){
                dstCurr = yStart+1;
                for(srcX=0;srcX<srcWidth;++srcX){
                    cdst[--dstCurr]=csrc[++srcCurr];
                }
                yStart+=yStep;
            }
            /* Chroma: pairs stay in VU order while positions mirror. */
            //transformVU
            yStart=vflip==1?totalSize-1:ySize+srcWidth-1;
            for(srcY=0;srcY<halfHeight;++srcY){
                dstCurr = yStart;
                for(srcX=0;srcX<halfWidth;++srcX){
                    cdst[dstCurr-1]=csrc[++srcCurr];
                    cdst[dstCurr]=csrc[++srcCurr];
                    dstCurr-=2;
                }
                yStart+=yStep;
            }
        }
    }
}
/*
 * Splits an NV21 frame into three separate planes: dstY (width*height
 * bytes), dstU and dstV (width*height/4 bytes each).
 * NOTE(review): all dst pointers are declared const but are written to.
 */
void NV21TOYUV(const unsigned char *src,const unsigned char *dstY,const unsigned char *dstU,const unsigned char *dstV,int width,int height)
{
    int ySize=width*height;
    int uvSize=ySize>>1;
    int uSize = uvSize>>1;
    //y
    memcpy(dstY,src,ySize);
    //uv
    /* NV21 interleaves V first: U samples sit at odd chroma offsets. */
    unsigned char *srcucur = src+ySize+1;
    unsigned char *srcvcur = src+ySize;
    unsigned char *dstucur = dstU;
    unsigned char *dstvcur = dstV;
    int i=0;
    while(i<uSize)
    {
        (*dstucur)=(*srcucur);
        (*dstvcur)=(*srcvcur);
        srcucur+=2;
        srcvcur+=2;
        ++dstucur;
        ++dstvcur;
        ++i;
    }
}
/*
 * Reorders glReadPixels output for Android: flips the image vertically
 * (GL rows are bottom-up) and reverses the byte order within each 32-bit
 * pixel.
 * NOTE(review): dstucur starts at offset +3 within each destination row,
 * so each pixel's bytes are written at offsets 3..6 — the last pixel of
 * the topmost destination row lands 3 bytes past the buffer end.  This
 * looks like an off-by-three bug, but it may be a deliberate channel
 * shift; verify against the Java caller before changing it.
 */
void FIXGLPIXEL(const unsigned int *src,unsigned int *dst,int width,int height)
{
    int i=0;
    int x,y;
    unsigned char temp;
    unsigned char *srcucur;
    unsigned char *dstucur;
    unsigned char *dstu=dst;
    unsigned char *srcu=src;
    for(y=0;y<height;y++)
    {
        srcucur=(srcu+y*width*4);
        /* Destination row is mirrored vertically. */
        int step=(height-y-1)*width*4;
        dstucur=(dstu+step);
        dstucur+=3;
        for(x=0;x<width;x++){
            /* Reverse the 4 bytes of each pixel. */
            (*dstucur)=(unsigned char)(*(srcucur+3));
            (*(dstucur+1))=(unsigned char)(*(srcucur+2));
            (*(dstucur+2))=(unsigned char)(*(srcucur+1));
            (*(dstucur+3))=(unsigned char)(*(srcucur));
            srcucur+=4;
            dstucur+=4;
        }
    }
}

View File

@@ -0,0 +1,20 @@
#ifndef __COLORCONVERT_H__
#define __COLORCONVERT_H__

/* Android ImageFormat.NV21 constant. */
#define COLOR_FORMAT_NV21 17

/* Direction flags: low bits select flips, high nibble selects rotation. */
#define FLAG_DIRECTION_FLIP_HORIZONTAL 0x01
#define FLAG_DIRECTION_FLIP_VERTICAL 0x02
#define FLAG_DIRECTION_ROATATION_0 0x10
#define FLAG_DIRECTION_ROATATION_90 0x20
#define FLAG_DIRECTION_ROATATION_180 0x40
#define FLAG_DIRECTION_ROATATION_270 0x80

/* NV21 -> NV12 (interleaved UV).  ySize = width * height. */
void NV21TOYUV420SP(const unsigned char *src,const unsigned char *dst,int ySize);
/* NV12 -> planar I420. */
void YUV420SPTOYUV420P(const unsigned char *src,const unsigned char *dst,int ySize);
/* NV21 -> planar I420. */
void NV21TOYUV420P(const unsigned char *src,const unsigned char *dst,int ySize);
/* NV21 -> packed 0xAARRGGBB pixels. */
void NV21TOARGB(const unsigned char *src,const unsigned int *dst,int width,int height);
/* Rotates/flips an NV21 frame per the FLAG_DIRECTION_* bits.
 * Fix: parameter names changed from dstWidth/dstHeight to srcWidth/srcHeight
 * to match the definition in colorConvert.c — they describe the SOURCE
 * frame size.  (Prototype parameter names have no effect on linkage.) */
void NV21Transform(const unsigned char *src,const unsigned char *dst,int srcWidth,int srcHeight,int directionFlag);
/* NV21 -> three separate Y/U/V planes. */
void NV21TOYUV(const unsigned char *src,const unsigned char *dstY,const unsigned char *dstU,const unsigned char *dstV,int width,int height);
/* Reorders glReadPixels RGBA output (vertical flip + per-pixel byte reversal). */
void FIXGLPIXEL(const unsigned int *src,unsigned int *dst,int width,int height);

#endif

View File

@@ -0,0 +1,28 @@
#ifndef __LEUDLOG_H__
#define __LEUDLOG_H__
#define ENABLE_LOG 1
#ifdef __ANDROID__
#include <android/log.h>
#define LOG_TAG "RES"
#ifdef ENABLE_LOG
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
#else
#define LOGD(...)
#endif
#else
#include <stdio.h>
#ifdef ENABLE_LOG
#define LOGD(...) printf(__VA_ARGS__)
#else
#define LOGD(...)
#endif
#endif
#endif

View File

@@ -0,0 +1,104 @@
#include "colorConvert.h"
#include "log.h"
#include "jni.h"
#include <string.h>
#include <android/native_window_jni.h>
/*
 * JNI bridge for ColorHelper.NV21TOYUV420SP.
 * Fix: the output array was released with JNI_ABORT, which discards the
 * converted data whenever GetByteArrayElements returned a copy of the
 * array instead of pinning it.  The destination must be released with
 * mode 0 (copy back and free); JNI_ABORT is correct only for the
 * read-only source.
 */
JNIEXPORT void JNICALL Java_me_lake_librestreaming_core_ColorHelper_NV21TOYUV420SP
(JNIEnv * env, jobject thiz, jbyteArray srcarray,jbyteArray dstarray,jint ySize) {
    unsigned char *src = (unsigned char *)(*env)->GetByteArrayElements(env,srcarray, 0);
    unsigned char *dst = (unsigned char*)(*env)->GetByteArrayElements(env,dstarray, 0);
    NV21TOYUV420SP(src,dst,ySize);
    (*env)->ReleaseByteArrayElements(env,srcarray,(jbyte *)src,JNI_ABORT);
    (*env)->ReleaseByteArrayElements(env,dstarray,(jbyte *)dst,0);
    return;
}
/*
 * JNI bridge for ColorHelper.YUV420SPTOYUV420P.
 * Fix: release the output array with mode 0 so the converted bytes are
 * copied back when the VM handed us a copy; the original's JNI_ABORT
 * silently threw the result away on such VMs.
 */
JNIEXPORT void JNICALL Java_me_lake_librestreaming_core_ColorHelper_YUV420SPTOYUV420P
(JNIEnv * env, jobject thiz, jbyteArray srcarray,jbyteArray dstarray,jint ySize) {
    unsigned char *src = (unsigned char *)(*env)->GetByteArrayElements(env,srcarray, 0);
    unsigned char *dst = (unsigned char*)(*env)->GetByteArrayElements(env,dstarray, 0);
    YUV420SPTOYUV420P(src,dst,ySize);
    (*env)->ReleaseByteArrayElements(env,srcarray,(jbyte *)src,JNI_ABORT);
    (*env)->ReleaseByteArrayElements(env,dstarray,(jbyte *)dst,0);
    return;
}
/*
 * JNI bridge: convert an NV21 frame to planar YUV420P (I420).
 * ySize: number of luma bytes (width * height).
 */
JNIEXPORT void JNICALL Java_me_lake_librestreaming_core_ColorHelper_NV21TOYUV420P
        (JNIEnv * env, jobject thiz, jbyteArray srcarray, jbyteArray dstarray, jint ySize) {
    unsigned char *src = (unsigned char *) (*env)->GetByteArrayElements(env, srcarray, 0);
    unsigned char *dst = (unsigned char *) (*env)->GetByteArrayElements(env, dstarray, 0);
    NV21TOYUV420P(src, dst, ySize);
    /* src is read-only: JNI_ABORT skips the useless copy-back. */
    (*env)->ReleaseByteArrayElements(env, srcarray, (jbyte *) src, JNI_ABORT);
    /* BUGFIX: commit the result with mode 0; JNI_ABORT discarded the output
     * whenever the VM returned a copy of the array. */
    (*env)->ReleaseByteArrayElements(env, dstarray, (jbyte *) dst, 0);
}
/*
 * JNI bridge: convert an NV21 frame to packed 32-bit ARGB.
 * dstarray receives width*height pixels.
 */
JNIEXPORT void JNICALL Java_me_lake_librestreaming_core_ColorHelper_NV21TOARGB
        (JNIEnv *env, jobject thiz, jbyteArray srcarray, jintArray dstarray, jint width, jint height) {
    unsigned char *src = (unsigned char *) (*env)->GetByteArrayElements(env, srcarray, 0);
    unsigned int *dst = (unsigned int *) (*env)->GetIntArrayElements(env, dstarray, 0);
    NV21TOARGB(src, dst, width, height);
    /* src is read-only: JNI_ABORT skips the useless copy-back. */
    (*env)->ReleaseByteArrayElements(env, srcarray, (jbyte *) src, JNI_ABORT);
    /* BUGFIX: commit the pixel output with mode 0; JNI_ABORT discarded it
     * whenever the VM returned a copy of the array. */
    (*env)->ReleaseIntArrayElements(env, dstarray, (jint *) dst, 0);
}
/*
 * JNI bridge: transform an NV21 frame according to directionflag
 * (FLAG_DIRECTION_* bits declared in colorConvert.h).
 */
JNIEXPORT void JNICALL Java_me_lake_librestreaming_core_ColorHelper_NV21Transform
        (JNIEnv * env, jobject thiz, jbyteArray srcarray, jbyteArray dstarray, jint srcwidth, jint srcheight, jint directionflag) {
    unsigned char *src = (unsigned char *) (*env)->GetByteArrayElements(env, srcarray, 0);
    unsigned char *dst = (unsigned char *) (*env)->GetByteArrayElements(env, dstarray, 0);
    NV21Transform(src, dst, srcwidth, srcheight, directionflag);
    /* src is read-only: JNI_ABORT skips the useless copy-back. */
    (*env)->ReleaseByteArrayElements(env, srcarray, (jbyte *) src, JNI_ABORT);
    /* BUGFIX: commit the transformed frame with mode 0; JNI_ABORT discarded
     * it whenever the VM returned a copy of the array. */
    (*env)->ReleaseByteArrayElements(env, dstarray, (jbyte *) dst, 0);
}
/*
 * JNI bridge: split an NV21 frame into separate Y, U and V planes for
 * GL texture upload.
 */
JNIEXPORT void JNICALL Java_me_lake_librestreaming_render_GLESRender_NV21TOYUV
        (JNIEnv *env, jobject thiz, jbyteArray srcarray, jbyteArray dstYarray, jbyteArray dstUarray, jbyteArray dstVarray, jint width, jint height) {
    unsigned char *src = (unsigned char *) (*env)->GetByteArrayElements(env, srcarray, 0);
    unsigned char *dsty = (unsigned char *) (*env)->GetByteArrayElements(env, dstYarray, 0);
    unsigned char *dstu = (unsigned char *) (*env)->GetByteArrayElements(env, dstUarray, 0);
    unsigned char *dstv = (unsigned char *) (*env)->GetByteArrayElements(env, dstVarray, 0);
    NV21TOYUV(src, dsty, dstu, dstv, width, height);
    /* src is read-only: JNI_ABORT skips the useless copy-back. */
    (*env)->ReleaseByteArrayElements(env, srcarray, (jbyte *) src, JNI_ABORT);
    /* BUGFIX: all three output planes must be committed with mode 0;
     * JNI_ABORT discarded them whenever the VM returned copies. */
    (*env)->ReleaseByteArrayElements(env, dstYarray, (jbyte *) dsty, 0);
    (*env)->ReleaseByteArrayElements(env, dstUarray, (jbyte *) dstu, 0);
    (*env)->ReleaseByteArrayElements(env, dstVarray, (jbyte *) dstv, 0);
}
/*
 * JNI bridge for FIXGLPIXEL: post-process w*h pixels read back from GL
 * (see colorConvert.h).
 */
JNIEXPORT void JNICALL Java_me_lake_librestreaming_core_ColorHelper_FIXGLPIXEL
        (JNIEnv * env, jobject thiz, jintArray srcarray, jintArray dstarray, jint w, jint h) {
    unsigned int *src = (unsigned int *) (*env)->GetIntArrayElements(env, srcarray, 0);
    unsigned int *dst = (unsigned int *) (*env)->GetIntArrayElements(env, dstarray, 0);
    FIXGLPIXEL(src, dst, w, h);
    /* src is read-only: JNI_ABORT skips the useless copy-back. */
    (*env)->ReleaseIntArrayElements(env, srcarray, (jint *) src, JNI_ABORT);
    /* BUGFIX: commit the output with mode 0; JNI_ABORT discarded it whenever
     * the VM returned a copy of the array. */
    (*env)->ReleaseIntArrayElements(env, dstarray, (jint *) dst, 0);
}
//rendering
/*
 * Blit an NV21 frame (pixelsArray, size bytes, w*h luma) onto a Java Surface
 * via ANativeWindow.  Handles the case where the window's row stride is wider
 * than the frame by copying row by row.
 * NOTE(review): COLOR_FORMAT_NV21 comes from colorConvert.h; the row loop
 * assumes one byte per pixel per row, which matches the NV21 plane layout
 * (h*3/2 rows of w bytes).  `buffer.bits + ...` is void* arithmetic - a GCC
 * extension; confirm the NDK toolchain in use permits it.
 */
JNIEXPORT void JNICALL Java_me_lake_librestreaming_render_NativeRender_renderingSurface
(JNIEnv * env, jobject thiz,jobject javaSurface,jbyteArray pixelsArray,jint w,jint h,jint size) {
ANativeWindow* window = ANativeWindow_fromSurface(env, javaSurface);
if(window!=NULL)
{
ANativeWindow_setBuffersGeometry(window,w,h,COLOR_FORMAT_NV21);
ANativeWindow_Buffer buffer;
/* Lock the window's back buffer; skip the frame entirely on failure. */
if (ANativeWindow_lock(window, &buffer, NULL) == 0) {
unsigned char *pixels = (unsigned char*)(*env)->GetByteArrayElements(env,pixelsArray, 0);
/* Fast path: stride equals width, the whole frame is one contiguous copy. */
if(buffer.width==buffer.stride){
memcpy(buffer.bits, pixels, size);
}else{
/* Slow path: copy each of the h*3/2 NV21 rows, honoring the stride. */
int height = h*3/2;
int width = w;
int i=0;
for(;i<height;++i)
memcpy(buffer.bits + buffer.stride * i
, pixels + width * i
, width);
}
/* Input is read-only, so JNI_ABORT (no copy-back) is correct here. */
(*env)->ReleaseByteArrayElements(env,pixelsArray,pixels,JNI_ABORT);
ANativeWindow_unlockAndPost(window);
}
/* Balance the reference acquired by ANativeWindow_fromSurface. */
ANativeWindow_release(window);
}
return;
}

View File

@@ -0,0 +1,23 @@
# ndk-build module: compiles the bundled librtmp sources into the
# libresrtmp shared library with crypto support disabled.
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
# Pick up every librtmp C file.
MY_CPP_LIST := $(wildcard $(LOCAL_PATH)/librtmp/*.c)
#MY_CPP_LIST :=librtmp/amf.c
#MY_CPP_LIST +=librtmp/hashswf.c
#MY_CPP_LIST +=librtmp/log.c
#MY_CPP_LIST := libresrtmp.c
LOCAL_SRC_FILES := $(MY_CPP_LIST)
LOCAL_C_INCLUDES := $(LOCAL_PATH)/librtmp
# NOTE(review): LOCAL_C_INCLUDES expects directories, not header file names,
# so the next two lines are almost certainly no-ops.  Also, the JNI wrapper
# libresrtmp.c is not in LOCAL_SRC_FILES (only librtmp/*.c is, and the line
# adding it above is commented out) - confirm how the wrapper gets built.
LOCAL_C_INCLUDES += libresrtmp.h
LOCAL_C_INCLUDES += log.h
# Build librtmp without OpenSSL/GnuTLS/PolarSSL.
LOCAL_CFLAGS += -DNO_CRYPTO
LOCAL_MODULE := libresrtmp
# Android liblog for __android_log_print.
LOCAL_LDLIBS := -llog
include $(BUILD_SHARED_LIBRARY)

View File

@@ -0,0 +1,141 @@
#include <malloc.h>
#include "libresrtmp.h"
#include "rtmp.h"
/*
 * Class:     me_lake_librestreaming_rtmp_RtmpClient
 * Method:    open
 * Signature: (Ljava/lang/String;Z)J
 *
 * Allocates and connects an RTMP session for `url_`.  When isPublishMode is
 * true the session is opened for writing (publishing).  Returns the native
 * RTMP* as a jlong handle, or 0 on any failure.
 */
JNIEXPORT jlong JNICALL Java_me_lake_librestreaming_rtmp_RtmpClient_open(JNIEnv * env, jobject thiz, jstring url_, jboolean isPublishMode) {
    const char *url = (*env)->GetStringUTFChars(env, url_, 0);
    LOGD("RTMP_OPENING:%s", url);
    RTMP *rtmp = RTMP_Alloc();
    if (rtmp == NULL) {
        LOGD("RTMP_Alloc=NULL");
        /* BUGFIX: the UTF chars were leaked on every error path. */
        (*env)->ReleaseStringUTFChars(env, url_, url);
        return 0; /* BUGFIX: return type is jlong, not a pointer - 0, not NULL */
    }
    RTMP_Init(rtmp);
    int ret = RTMP_SetupURL(rtmp, url);
    if (!ret) {
        RTMP_Free(rtmp);
        LOGD("RTMP_SetupURL=%d", ret); /* BUGFIX: log the value, not the literal "ret" */
        (*env)->ReleaseStringUTFChars(env, url_, url);
        return 0;
    }
    if (isPublishMode) {
        RTMP_EnableWrite(rtmp);
    }
    ret = RTMP_Connect(rtmp, NULL);
    if (!ret) {
        RTMP_Free(rtmp);
        LOGD("RTMP_Connect=%d", ret);
        (*env)->ReleaseStringUTFChars(env, url_, url);
        return 0;
    }
    ret = RTMP_ConnectStream(rtmp, 0);
    if (!ret) {
        /* BUGFIX: removed a stray second RTMP_ConnectStream() retry whose
         * result was ignored before tearing the session down anyway. */
        RTMP_Close(rtmp);
        RTMP_Free(rtmp);
        LOGD("RTMP_ConnectStream=%d", ret);
        (*env)->ReleaseStringUTFChars(env, url_, url);
        return 0;
    }
    /* NOTE(review): librtmp's RTMP_SetupURL keeps pointers into `url`;
     * releasing the chars here matches the original behavior, but would
     * leave dangling pointers if RTMP_ReconnectStream were ever used -
     * confirm against the bundled librtmp. */
    (*env)->ReleaseStringUTFChars(env, url_, url);
    LOGD("RTMP_OPENED");
    return (jlong) rtmp;
}
/*
 * Class:     me_lake_librestreaming_rtmp_RtmpClient
 * Method:    read
 * Signature: (J[BII)I
 *
 * Reads up to `size` bytes from the RTMP stream into data_[offset..].
 * Returns the count from RTMP_Read (<= 0 on EOF/error), or -1 if the
 * temporary buffer cannot be allocated.
 */
JNIEXPORT jint JNICALL Java_me_lake_librestreaming_rtmp_RtmpClient_read
        (JNIEnv * env, jobject thiz, jlong rtmp, jbyteArray data_, jint offset, jint size) {
    char *data = malloc((size_t) size);
    if (data == NULL) {
        /* BUGFIX: the malloc result was used unchecked. */
        return -1;
    }
    int readCount = RTMP_Read((RTMP *) rtmp, data, size);
    if (readCount > 0) {
        /* Copy the received bytes out into the caller's Java array. */
        (*env)->SetByteArrayRegion(env, data_, offset, readCount, (const jbyte *) data);
    }
    free(data);
    return readCount;
}
/*
 * Class:     me_lake_librestreaming_rtmp_RtmpClient
 * Method:    write
 * Signature: (J[BIII)I
 *
 * Sends one FLV tag body (`data`, `size` bytes) as an RTMP packet of the
 * given `type` (metadata/video/audio) with timestamp `ts`.
 * Returns 0 on success, a negative value on failure.
 */
JNIEXPORT jint JNICALL Java_me_lake_librestreaming_rtmp_RtmpClient_write
        (JNIEnv * env, jobject thiz, jlong rtmp, jbyteArray data, jint size, jint type, jint ts) {
    jbyte *buffer = (*env)->GetByteArrayElements(env, data, NULL);
    RTMPPacket *packet = (RTMPPacket *) malloc(sizeof(RTMPPacket));
    if (buffer == NULL || packet == NULL) {
        /* BUGFIX: allocation results were used unchecked. */
        if (packet != NULL) free(packet);
        if (buffer != NULL) (*env)->ReleaseByteArrayElements(env, data, buffer, JNI_ABORT);
        return -1;
    }
    RTMPPacket_Alloc(packet, size);
    RTMPPacket_Reset(packet);
    /* Route each payload type onto its conventional RTMP chunk channel. */
    if (type == RTMP_PACKET_TYPE_INFO) { /* metadata */
        packet->m_nChannel = 0x03;
    } else if (type == RTMP_PACKET_TYPE_VIDEO) { /* video */
        packet->m_nChannel = 0x04;
    } else if (type == RTMP_PACKET_TYPE_AUDIO) { /* audio */
        packet->m_nChannel = 0x05;
    } else {
        packet->m_nChannel = -1;
    }
    packet->m_nInfoField2 = ((RTMP *) rtmp)->m_stream_id;
    memcpy(packet->m_body, buffer, size);
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    packet->m_hasAbsTimestamp = FALSE;
    packet->m_nTimeStamp = ts;
    packet->m_packetType = type;
    packet->m_nBodySize = size;
    int ret = RTMP_SendPacket((RTMP *) rtmp, packet, 0);
    RTMPPacket_Free(packet);
    free(packet);
    /* The input was only read; JNI_ABORT avoids a pointless copy-back. */
    (*env)->ReleaseByteArrayElements(env, data, buffer, JNI_ABORT);
    if (!ret) {
        /* BUGFIX: the failure branch returned the undefined symbol `_sockerr`,
         * which cannot compile; report a plain negative error code instead.
         * NOTE(review): confirm Java-side callers treat any non-zero return
         * as failure. */
        return -1;
    }
    return 0;
}
/*
 * Class:     me_lake_librestreaming_rtmp_RtmpClient
 * Method:    close
 * Signature: (J)I
 *
 * Closes and frees the native RTMP session handle.  Always returns 0.
 *
 * NOTE(review): the parameter order (jlong before jobject) does not match
 * the JNI calling convention, where the receiver `jobject thiz` must come
 * immediately after JNIEnv* and before the Java-declared arguments.  As
 * written, the VM will pass the object reference where `rtmp` is expected.
 * Fixing this requires changing both this definition and the matching
 * declaration in libresrtmp.h together - confirm against the Java-side
 * `native` declaration before touching either.
 */
JNIEXPORT jint JNICALL Java_me_lake_librestreaming_rtmp_RtmpClient_close
(JNIEnv * env,jlong rtmp, jobject thiz) {
RTMP_Close((RTMP*)rtmp);
RTMP_Free((RTMP*)rtmp);
return 0;
}
/*
 * Returns the peer IP address recorded on the RTMP session as a Java String,
 * or an empty string when the handle is null.
 */
JNIEXPORT jstring JNICALL Java_me_lake_librestreaming_rtmp_RtmpClient_getIpAddr
        (JNIEnv * env, jobject thiz, jlong rtmp) {
    /* Guard clause: a zero handle means no session was ever opened. */
    if (rtmp == 0) {
        return (*env)->NewStringUTF(env, "");
    }
    RTMP *session = (RTMP *) rtmp;
    return (*env)->NewStringUTF(env, session->ipaddr);
}

View File

@@ -0,0 +1,45 @@
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <android/log.h>
#include <jni.h>
#include <stddef.h>
#include "log.h"
/* Header for class me_lake_librestreaming_rtmp_RtmpClient */
#ifndef _Included_me_lake_librestreaming_rtmp_RtmpClient
#define _Included_me_lake_librestreaming_rtmp_RtmpClient
#ifdef __cplusplus
extern "C" {
#endif
/*
 * Class:     me_lake_librestreaming_rtmp_RtmpClient
 * Method:    open
 * Signature: (Ljava/lang/String;Z)J
 * Opens an RTMP session for `url_`; returns a native RTMP* handle or 0.
 */
JNIEXPORT jlong JNICALL Java_me_lake_librestreaming_rtmp_RtmpClient_open
(JNIEnv * env, jobject thiz, jstring url_, jboolean isPublishMode);
/*
 * Class:     me_lake_librestreaming_rtmp_RtmpClient
 * Method:    read
 * Signature: (J[BII)I
 * Reads up to `size` bytes into data_[offset..]; returns the byte count.
 */
JNIEXPORT jint JNICALL Java_me_lake_librestreaming_rtmp_RtmpClient_read
(JNIEnv * env, jobject thiz,jlong rtmp, jbyteArray data_, jint offset, jint size);
/*
 * Class:     me_lake_librestreaming_rtmp_RtmpClient
 * Method:    write
 * Signature: (J[BIII)I
 * Sends one FLV tag as an RTMP packet; returns 0 on success.
 */
JNIEXPORT jint JNICALL Java_me_lake_librestreaming_rtmp_RtmpClient_write
(JNIEnv * env, jobject thiz,jlong rtmp, jbyteArray data, jint size, jint type, jint ts);
/*
 * Class:     me_lake_librestreaming_rtmp_RtmpClient
 * Method:    close
 * Signature: (J)I
 * NOTE(review): the jlong-before-jobject parameter order does not match the
 * JNI calling convention (the receiver comes first); it must be fixed in
 * lockstep with the definition in libresrtmp.c - confirm against the
 * Java-side `native` declaration.
 */
JNIEXPORT jint JNICALL Java_me_lake_librestreaming_rtmp_RtmpClient_close
(JNIEnv * env,jlong rtmp, jobject thiz);
#ifdef __cplusplus
}
#endif
#endif

View File

@@ -0,0 +1,504 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 2.1, February 1999
Copyright (C) 1991, 1999 Free Software Foundation, Inc.
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
[This is the first released version of the Lesser GPL. It also counts
as the successor of the GNU Library Public License, version 2, hence
the version number 2.1.]
Preamble
The licenses for most software are designed to take away your
freedom to share and change it. By contrast, the GNU General Public
Licenses are intended to guarantee your freedom to share and change
free software--to make sure the software is free for all its users.
This license, the Lesser General Public License, applies to some
specially designated software packages--typically libraries--of the
Free Software Foundation and other authors who decide to use it. You
can use it too, but we suggest you first think carefully about whether
this license or the ordinary General Public License is the better
strategy to use in any particular case, based on the explanations below.
When we speak of free software, we are referring to freedom of use,
not price. Our General Public Licenses are designed to make sure that
you have the freedom to distribute copies of free software (and charge
for this service if you wish); that you receive source code or can get
it if you want it; that you can change the software and use pieces of
it in new free programs; and that you are informed that you can do
these things.
To protect your rights, we need to make restrictions that forbid
distributors to deny you these rights or to ask you to surrender these
rights. These restrictions translate to certain responsibilities for
you if you distribute copies of the library or if you modify it.
For example, if you distribute copies of the library, whether gratis
or for a fee, you must give the recipients all the rights that we gave
you. You must make sure that they, too, receive or can get the source
code. If you link other code with the library, you must provide
complete object files to the recipients, so that they can relink them
with the library after making changes to the library and recompiling
it. And you must show them these terms so they know their rights.
We protect your rights with a two-step method: (1) we copyright the
library, and (2) we offer you this license, which gives you legal
permission to copy, distribute and/or modify the library.
To protect each distributor, we want to make it very clear that
there is no warranty for the free library. Also, if the library is
modified by someone else and passed on, the recipients should know
that what they have is not the original version, so that the original
author's reputation will not be affected by problems that might be
introduced by others.
Finally, software patents pose a constant threat to the existence of
any free program. We wish to make sure that a company cannot
effectively restrict the users of a free program by obtaining a
restrictive license from a patent holder. Therefore, we insist that
any patent license obtained for a version of the library must be
consistent with the full freedom of use specified in this license.
Most GNU software, including some libraries, is covered by the
ordinary GNU General Public License. This license, the GNU Lesser
General Public License, applies to certain designated libraries, and
is quite different from the ordinary General Public License. We use
this license for certain libraries in order to permit linking those
libraries into non-free programs.
When a program is linked with a library, whether statically or using
a shared library, the combination of the two is legally speaking a
combined work, a derivative of the original library. The ordinary
General Public License therefore permits such linking only if the
entire combination fits its criteria of freedom. The Lesser General
Public License permits more lax criteria for linking other code with
the library.
We call this license the "Lesser" General Public License because it
does Less to protect the user's freedom than the ordinary General
Public License. It also provides other free software developers Less
of an advantage over competing non-free programs. These disadvantages
are the reason we use the ordinary General Public License for many
libraries. However, the Lesser license provides advantages in certain
special circumstances.
For example, on rare occasions, there may be a special need to
encourage the widest possible use of a certain library, so that it becomes
a de-facto standard. To achieve this, non-free programs must be
allowed to use the library. A more frequent case is that a free
library does the same job as widely used non-free libraries. In this
case, there is little to gain by limiting the free library to free
software only, so we use the Lesser General Public License.
In other cases, permission to use a particular library in non-free
programs enables a greater number of people to use a large body of
free software. For example, permission to use the GNU C Library in
non-free programs enables many more people to use the whole GNU
operating system, as well as its variant, the GNU/Linux operating
system.
Although the Lesser General Public License is Less protective of the
users' freedom, it does ensure that the user of a program that is
linked with the Library has the freedom and the wherewithal to run
that program using a modified version of the Library.
The precise terms and conditions for copying, distribution and
modification follow. Pay close attention to the difference between a
"work based on the library" and a "work that uses the library". The
former contains code derived from the library, whereas the latter must
be combined with the library in order to run.
GNU LESSER GENERAL PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
0. This License Agreement applies to any software library or other
program which contains a notice placed by the copyright holder or
other authorized party saying it may be distributed under the terms of
this Lesser General Public License (also called "this License").
Each licensee is addressed as "you".
A "library" means a collection of software functions and/or data
prepared so as to be conveniently linked with application programs
(which use some of those functions and data) to form executables.
The "Library", below, refers to any such software library or work
which has been distributed under these terms. A "work based on the
Library" means either the Library or any derivative work under
copyright law: that is to say, a work containing the Library or a
portion of it, either verbatim or with modifications and/or translated
straightforwardly into another language. (Hereinafter, translation is
included without limitation in the term "modification".)
"Source code" for a work means the preferred form of the work for
making modifications to it. For a library, complete source code means
all the source code for all modules it contains, plus any associated
interface definition files, plus the scripts used to control compilation
and installation of the library.
Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope. The act of
running a program using the Library is not restricted, and output from
such a program is covered only if its contents constitute a work based
on the Library (independent of the use of the Library in a tool for
writing it). Whether that is true depends on what the Library does
and what the program that uses the Library does.
1. You may copy and distribute verbatim copies of the Library's
complete source code as you receive it, in any medium, provided that
you conspicuously and appropriately publish on each copy an
appropriate copyright notice and disclaimer of warranty; keep intact
all the notices that refer to this License and to the absence of any
warranty; and distribute a copy of this License along with the
Library.
You may charge a fee for the physical act of transferring a copy,
and you may at your option offer warranty protection in exchange for a
fee.
2. You may modify your copy or copies of the Library or any portion
of it, thus forming a work based on the Library, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:
a) The modified work must itself be a software library.
b) You must cause the files modified to carry prominent notices
stating that you changed the files and the date of any change.
c) You must cause the whole of the work to be licensed at no
charge to all third parties under the terms of this License.
d) If a facility in the modified Library refers to a function or a
table of data to be supplied by an application program that uses
the facility, other than as an argument passed when the facility
is invoked, then you must make a good faith effort to ensure that,
in the event an application does not supply such function or
table, the facility still operates, and performs whatever part of
its purpose remains meaningful.
(For example, a function in a library to compute square roots has
a purpose that is entirely well-defined independent of the
application. Therefore, Subsection 2d requires that any
application-supplied function or table used by this function must
be optional: if the application does not supply it, the square
root function must still compute square roots.)
These requirements apply to the modified work as a whole. If
identifiable sections of that work are not derived from the Library,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works. But when you
distribute the same sections as part of a whole which is a work based
on the Library, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote
it.
Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Library.
In addition, mere aggregation of another work not based on the Library
with the Library (or with a work based on the Library) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.
3. You may opt to apply the terms of the ordinary GNU General Public
License instead of this License to a given copy of the Library. To do
this, you must alter all the notices that refer to this License, so
that they refer to the ordinary GNU General Public License, version 2,
instead of to this License. (If a newer version than version 2 of the
ordinary GNU General Public License has appeared, then you can specify
that version instead if you wish.) Do not make any other change in
these notices.
Once this change is made in a given copy, it is irreversible for
that copy, so the ordinary GNU General Public License applies to all
subsequent copies and derivative works made from that copy.
This option is useful when you wish to copy part of the code of
the Library into a program that is not a library.
4. You may copy and distribute the Library (or a portion or
derivative of it, under Section 2) in object code or executable form
under the terms of Sections 1 and 2 above provided that you accompany
it with the complete corresponding machine-readable source code, which
must be distributed under the terms of Sections 1 and 2 above on a
medium customarily used for software interchange.
If distribution of object code is made by offering access to copy
from a designated place, then offering equivalent access to copy the
source code from the same place satisfies the requirement to
distribute the source code, even though third parties are not
compelled to copy the source along with the object code.
5. A program that contains no derivative of any portion of the
Library, but is designed to work with the Library by being compiled or
linked with it, is called a "work that uses the Library". Such a
work, in isolation, is not a derivative work of the Library, and
therefore falls outside the scope of this License.
However, linking a "work that uses the Library" with the Library
creates an executable that is a derivative of the Library (because it
contains portions of the Library), rather than a "work that uses the
library". The executable is therefore covered by this License.
Section 6 states terms for distribution of such executables.
When a "work that uses the Library" uses material from a header file
that is part of the Library, the object code for the work may be a
derivative work of the Library even though the source code is not.
Whether this is true is especially significant if the work can be
linked without the Library, or if the work is itself a library. The
threshold for this to be true is not precisely defined by law.
If such an object file uses only numerical parameters, data
structure layouts and accessors, and small macros and small inline
functions (ten lines or less in length), then the use of the object
file is unrestricted, regardless of whether it is legally a derivative
work. (Executables containing this object code plus portions of the
Library will still fall under Section 6.)
Otherwise, if the work is a derivative of the Library, you may
distribute the object code for the work under the terms of Section 6.
Any executables containing that work also fall under Section 6,
whether or not they are linked directly with the Library itself.
6. As an exception to the Sections above, you may also combine or
link a "work that uses the Library" with the Library to produce a
work containing portions of the Library, and distribute that work
under terms of your choice, provided that the terms permit
modification of the work for the customer's own use and reverse
engineering for debugging such modifications.
You must give prominent notice with each copy of the work that the
Library is used in it and that the Library and its use are covered by
this License. You must supply a copy of this License. If the work
during execution displays copyright notices, you must include the
copyright notice for the Library among them, as well as a reference
directing the user to the copy of this License. Also, you must do one
of these things:
a) Accompany the work with the complete corresponding
machine-readable source code for the Library including whatever
changes were used in the work (which must be distributed under
Sections 1 and 2 above); and, if the work is an executable linked
with the Library, with the complete machine-readable "work that
uses the Library", as object code and/or source code, so that the
user can modify the Library and then relink to produce a modified
executable containing the modified Library. (It is understood
that the user who changes the contents of definitions files in the
Library will not necessarily be able to recompile the application
to use the modified definitions.)
b) Use a suitable shared library mechanism for linking with the
Library. A suitable mechanism is one that (1) uses at run time a
copy of the library already present on the user's computer system,
rather than copying library functions into the executable, and (2)
will operate properly with a modified version of the library, if
the user installs one, as long as the modified version is
interface-compatible with the version that the work was made with.
c) Accompany the work with a written offer, valid for at
least three years, to give the same user the materials
specified in Subsection 6a, above, for a charge no more
than the cost of performing this distribution.
d) If distribution of the work is made by offering access to copy
from a designated place, offer equivalent access to copy the above
specified materials from the same place.
e) Verify that the user has already received a copy of these
materials or that you have already sent this user a copy.
For an executable, the required form of the "work that uses the
Library" must include any data and utility programs needed for
reproducing the executable from it. However, as a special exception,
the materials to be distributed need not include anything that is
normally distributed (in either source or binary form) with the major
components (compiler, kernel, and so on) of the operating system on
which the executable runs, unless that component itself accompanies
the executable.
It may happen that this requirement contradicts the license
restrictions of other proprietary libraries that do not normally
accompany the operating system. Such a contradiction means you cannot
use both them and the Library together in an executable that you
distribute.
7. You may place library facilities that are a work based on the
Library side-by-side in a single library together with other library
facilities not covered by this License, and distribute such a combined
library, provided that the separate distribution of the work based on
the Library and of the other library facilities is otherwise
permitted, and provided that you do these two things:
a) Accompany the combined library with a copy of the same work
based on the Library, uncombined with any other library
facilities. This must be distributed under the terms of the
Sections above.
b) Give prominent notice with the combined library of the fact
that part of it is a work based on the Library, and explaining
where to find the accompanying uncombined form of the same work.
8. You may not copy, modify, sublicense, link with, or distribute
the Library except as expressly provided under this License. Any
attempt otherwise to copy, modify, sublicense, link with, or
distribute the Library is void, and will automatically terminate your
rights under this License. However, parties who have received copies,
or rights, from you under this License will not have their licenses
terminated so long as such parties remain in full compliance.
9. You are not required to accept this License, since you have not
signed it. However, nothing else grants you permission to modify or
distribute the Library or its derivative works. These actions are
prohibited by law if you do not accept this License. Therefore, by
modifying or distributing the Library (or any work based on the
Library), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Library or works based on it.
10. Each time you redistribute the Library (or any work based on the
Library), the recipient automatically receives a license from the
original licensor to copy, distribute, link with or modify the Library
subject to these terms and conditions. You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties with
this License.
11. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Library at all. For example, if a patent
license would not permit royalty-free redistribution of the Library by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Library.
If any portion of this section is held invalid or unenforceable under any
particular circumstance, the balance of the section is intended to apply,
and the section as a whole is intended to apply in other circumstances.
It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system which is
implemented by public license practices. Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.
This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.
12. If the distribution and/or use of the Library is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Library under this License may add
an explicit geographical distribution limitation excluding those countries,
so that distribution is permitted only in or among countries not thus
excluded. In such case, this License incorporates the limitation as if
written in the body of this License.
13. The Free Software Foundation may publish revised and/or new
versions of the Lesser General Public License from time to time.
Such new versions will be similar in spirit to the present version,
but may differ in detail to address new problems or concerns.
Each version is given a distinguishing version number. If the Library
specifies a version number of this License which applies to it and
"any later version", you have the option of following the terms and
conditions either of that version or of any later version published by
the Free Software Foundation. If the Library does not specify a
license version number, you may choose any version ever published by
the Free Software Foundation.
14. If you wish to incorporate parts of the Library into other free
programs whose distribution conditions are incompatible with these,
write to the author to ask for permission. For software which is
copyrighted by the Free Software Foundation, write to the Free
Software Foundation; we sometimes make exceptions for this. Our
decision will be guided by the two goals of preserving the free status
of all derivatives of our free software and of promoting the sharing
and reuse of software generally.
NO WARRANTY
15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGES.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Libraries
If you develop a new library, and you want it to be of the greatest
possible use to the public, we recommend making it free software that
everyone can redistribute and change. You can do so by permitting
redistribution under these terms (or, alternatively, under the terms of the
ordinary General Public License).
To apply these terms, attach the following notices to the library. It is
safest to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least the
"copyright" line and a pointer to where the full notice is found.
<one line to give the library's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Also add information on how to contact you by electronic and paper mail.
You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the library, if
necessary. Here is a sample; alter the names:
Yoyodyne, Inc., hereby disclaims all copyright interest in the
library `Frob' (a library for tweaking knobs) written by James Random Hacker.
<signature of Ty Coon>, 1 April 1990
Ty Coon, President of Vice
That's all there is to it!

View File

@@ -0,0 +1,118 @@
# librtmp build configuration (rtmpdump v2.4).
# NOTE(review): recipe lines below are TAB-indented, as make requires;
# the indentation had been stripped by the page rendering this file came from.
VERSION=v2.4

# Install prefix and derived directories; DESTDIR supports staged installs.
prefix=/usr/local

incdir=$(prefix)/include/librtmp
bindir=$(prefix)/bin
libdir=$(prefix)/lib
mandir=$(prefix)/man
BINDIR=$(DESTDIR)$(bindir)
INCDIR=$(DESTDIR)$(incdir)
LIBDIR=$(DESTDIR)$(libdir)
MANDIR=$(DESTDIR)$(mandir)

# Toolchain; honours $(CROSS_COMPILE) prefix for cross builds.
CC=$(CROSS_COMPILE)gcc
LD=$(CROSS_COMPILE)ld
AR=$(CROSS_COMPILE)ar

# Target platform: posix, darwin or mingw.
SYS=posix

# TLS backend selector: OPENSSL, GNUTLS or POLARSSL; empty means no crypto.
CRYPTO=OPENSSL
#CRYPTO=GNUTLS
DEF_POLARSSL=-DUSE_POLARSSL
DEF_OPENSSL=-DUSE_OPENSSL
DEF_GNUTLS=-DUSE_GNUTLS
DEF_=-DNO_CRYPTO
REQ_GNUTLS=gnutls
REQ_OPENSSL=libssl,libcrypto
LIBZ=-lz
LIBS_posix=
LIBS_darwin=
LIBS_mingw=-lws2_32 -lwinmm -lgdi32
LIB_GNUTLS=-lgnutls -lhogweed -lnettle -lgmp $(LIBZ)
LIB_OPENSSL=-lssl -lcrypto $(LIBZ)
LIB_POLARSSL=-lpolarssl $(LIBZ)
PRIVATE_LIBS=$(LIBS_$(SYS))
CRYPTO_LIB=$(LIB_$(CRYPTO)) $(PRIVATE_LIBS)
CRYPTO_REQ=$(REQ_$(CRYPTO))
CRYPTO_DEF=$(DEF_$(CRYPTO))

# Shared-library name, suffix and install dir differ per platform.
SO_VERSION=0
SOX_posix=so
SOX_darwin=dylib
SOX_mingw=dll
SOX=$(SOX_$(SYS))
SO_posix=.$(SOX).$(SO_VERSION)
SO_darwin=.$(SO_VERSION).$(SOX)
SO_mingw=-$(SO_VERSION).$(SOX)
SO_EXT=$(SO_$(SYS))
SODIR_posix=$(LIBDIR)
SODIR_darwin=$(LIBDIR)
SODIR_mingw=$(BINDIR)
SODIR=$(SODIR_$(SYS))
SO_LDFLAGS_posix=-shared -Wl,-soname,$@
SO_LDFLAGS_darwin=-dynamiclib -twolevel_namespace -undefined dynamic_lookup \
	-fno-common -headerpad_max_install_names -install_name $(libdir)/$@
SO_LDFLAGS_mingw=-shared -Wl,--out-implib,librtmp.dll.a
SO_LDFLAGS=$(SO_LDFLAGS_$(SYS))
INSTALL_IMPLIB_posix=
INSTALL_IMPLIB_darwin=
INSTALL_IMPLIB_mingw=cp librtmp.dll.a $(LIBDIR)
INSTALL_IMPLIB=$(INSTALL_IMPLIB_$(SYS))

# SHARED=yes builds a shared library in addition to the static one.
SHARED=yes
SODEF_yes=-fPIC
SOLIB_yes=librtmp$(SO_EXT)
SOINST_yes=install_so
SO_DEF=$(SODEF_$(SHARED))
SO_LIB=$(SOLIB_$(SHARED))
SO_INST=$(SOINST_$(SHARED))

# XDEF/XCFLAGS/XLDFLAGS are user-supplied extras from the command line.
DEF=-DRTMPDUMP_VERSION=\"$(VERSION)\" $(CRYPTO_DEF) $(XDEF)
OPT=-O2
CFLAGS=-Wall $(XCFLAGS) $(INC) $(DEF) $(OPT) $(SO_DEF)
LDFLAGS=$(XLDFLAGS)

OBJS=rtmp.o log.o amf.o hashswf.o parseurl.o

all:	librtmp.a $(SO_LIB)

clean:
	rm -f *.o *.a *.$(SOX) *$(SO_EXT) librtmp.pc

librtmp.a: $(OBJS)
	$(AR) rs $@ $?

# "$^ $>" is intentional, not a typo: $^ expands to the prerequisites under
# GNU make (where $> is empty), while $> does the same under BSD make
# (where $^ is empty) — the pairing keeps the rule portable to both.
librtmp$(SO_EXT): $(OBJS)
	$(CC) $(SO_LDFLAGS) $(LDFLAGS) -o $@ $^ $> $(CRYPTO_LIB)
	ln -sf $@ librtmp.$(SOX)

# Header dependencies for the object files.
log.o: log.c log.h Makefile
rtmp.o: rtmp.c rtmp.h rtmp_sys.h handshake.h dh.h log.h amf.h Makefile
amf.o: amf.c amf.h bytes.h log.h Makefile
hashswf.o: hashswf.c http.h rtmp.h rtmp_sys.h Makefile
parseurl.o: parseurl.c rtmp.h rtmp_sys.h log.h Makefile

# Generate the pkg-config file by substituting build settings into the template.
librtmp.pc: librtmp.pc.in Makefile
	sed -e "s;@prefix@;$(prefix);" -e "s;@libdir@;$(libdir);" \
	    -e "s;@VERSION@;$(VERSION);" \
	    -e "s;@CRYPTO_REQ@;$(CRYPTO_REQ);" \
	    -e "s;@PRIVATE_LIBS@;$(PRIVATE_LIBS);" librtmp.pc.in > $@

install:	install_base $(SO_INST)

install_base:	librtmp.a librtmp.pc
	-mkdir -p $(INCDIR) $(LIBDIR)/pkgconfig $(MANDIR)/man3 $(SODIR)
	cp amf.h http.h log.h rtmp.h $(INCDIR)
	cp librtmp.a $(LIBDIR)
	cp librtmp.pc $(LIBDIR)/pkgconfig
	cp librtmp.3 $(MANDIR)/man3

install_so:	librtmp$(SO_EXT)
	cp librtmp$(SO_EXT) $(SODIR)
	$(INSTALL_IMPLIB)
	cd $(SODIR); ln -sf librtmp$(SO_EXT) librtmp.$(SOX)

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,164 @@
#ifndef __AMF_H__
#define __AMF_H__
/*
* Copyright (C) 2005-2008 Team XBMC
* http://www.xbmc.org
* Copyright (C) 2008-2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#include <stdint.h>
#ifndef TRUE
#define TRUE 1
#define FALSE 0
#endif
#ifdef __cplusplus
extern "C"
{
#endif
typedef enum
{ AMF_NUMBER = 0, AMF_BOOLEAN, AMF_STRING, AMF_OBJECT,
AMF_MOVIECLIP, /* reserved, not used */
AMF_NULL, AMF_UNDEFINED, AMF_REFERENCE, AMF_ECMA_ARRAY, AMF_OBJECT_END,
AMF_STRICT_ARRAY, AMF_DATE, AMF_LONG_STRING, AMF_UNSUPPORTED,
AMF_RECORDSET, /* reserved, not used */
AMF_XML_DOC, AMF_TYPED_OBJECT,
AMF_AVMPLUS, /* switch to AMF3 */
AMF_INVALID = 0xff
} AMFDataType;
typedef enum
{ AMF3_UNDEFINED = 0, AMF3_NULL, AMF3_FALSE, AMF3_TRUE,
AMF3_INTEGER, AMF3_DOUBLE, AMF3_STRING, AMF3_XML_DOC, AMF3_DATE,
AMF3_ARRAY, AMF3_OBJECT, AMF3_XML, AMF3_BYTE_ARRAY
} AMF3DataType;
typedef struct AVal
{
char *av_val;
int av_len;
} AVal;
#define AVC(str) {str,sizeof(str)-1}
#define AVMATCH(a1,a2) ((a1)->av_len == (a2)->av_len && !memcmp((a1)->av_val,(a2)->av_val,(a1)->av_len))
struct AMFObjectProperty;
typedef struct AMFObject
{
int o_num;
struct AMFObjectProperty *o_props;
} AMFObject;
typedef struct AMFObjectProperty
{
AVal p_name;
AMFDataType p_type;
union
{
double p_number;
AVal p_aval;
AMFObject p_object;
} p_vu;
int16_t p_UTCoffset;
} AMFObjectProperty;
char *AMF_EncodeString(char *output, char *outend, const AVal * str);
char *AMF_EncodeNumber(char *output, char *outend, double dVal);
char *AMF_EncodeInt16(char *output, char *outend, short nVal);
char *AMF_EncodeInt24(char *output, char *outend, int nVal);
char *AMF_EncodeInt32(char *output, char *outend, int nVal);
char *AMF_EncodeBoolean(char *output, char *outend, int bVal);
/* Shortcuts for AMFProp_Encode */
char *AMF_EncodeNamedString(char *output, char *outend, const AVal * name, const AVal * value);
char *AMF_EncodeNamedNumber(char *output, char *outend, const AVal * name, double dVal);
char *AMF_EncodeNamedBoolean(char *output, char *outend, const AVal * name, int bVal);
unsigned short AMF_DecodeInt16(const char *data);
unsigned int AMF_DecodeInt24(const char *data);
unsigned int AMF_DecodeInt32(const char *data);
void AMF_DecodeString(const char *data, AVal * str);
void AMF_DecodeLongString(const char *data, AVal * str);
int AMF_DecodeBoolean(const char *data);
double AMF_DecodeNumber(const char *data);
char *AMF_Encode(AMFObject * obj, char *pBuffer, char *pBufEnd);
char *AMF_EncodeEcmaArray(AMFObject *obj, char *pBuffer, char *pBufEnd);
char *AMF_EncodeArray(AMFObject *obj, char *pBuffer, char *pBufEnd);
int AMF_Decode(AMFObject * obj, const char *pBuffer, int nSize,
int bDecodeName);
int AMF_DecodeArray(AMFObject * obj, const char *pBuffer, int nSize,
int nArrayLen, int bDecodeName);
int AMF3_Decode(AMFObject * obj, const char *pBuffer, int nSize,
int bDecodeName);
void AMF_Dump(AMFObject * obj);
void AMF_Reset(AMFObject * obj);
void AMF_AddProp(AMFObject * obj, const AMFObjectProperty * prop);
int AMF_CountProp(AMFObject * obj);
AMFObjectProperty *AMF_GetProp(AMFObject * obj, const AVal * name,
int nIndex);
AMFDataType AMFProp_GetType(AMFObjectProperty * prop);
void AMFProp_SetNumber(AMFObjectProperty * prop, double dval);
void AMFProp_SetBoolean(AMFObjectProperty * prop, int bflag);
void AMFProp_SetString(AMFObjectProperty * prop, AVal * str);
void AMFProp_SetObject(AMFObjectProperty * prop, AMFObject * obj);
void AMFProp_GetName(AMFObjectProperty * prop, AVal * name);
void AMFProp_SetName(AMFObjectProperty * prop, AVal * name);
double AMFProp_GetNumber(AMFObjectProperty * prop);
int AMFProp_GetBoolean(AMFObjectProperty * prop);
void AMFProp_GetString(AMFObjectProperty * prop, AVal * str);
void AMFProp_GetObject(AMFObjectProperty * prop, AMFObject * obj);
int AMFProp_IsValid(AMFObjectProperty * prop);
char *AMFProp_Encode(AMFObjectProperty * prop, char *pBuffer, char *pBufEnd);
int AMF3Prop_Decode(AMFObjectProperty * prop, const char *pBuffer,
int nSize, int bDecodeName);
int AMFProp_Decode(AMFObjectProperty * prop, const char *pBuffer,
int nSize, int bDecodeName);
void AMFProp_Dump(AMFObjectProperty * prop);
void AMFProp_Reset(AMFObjectProperty * prop);
typedef struct AMF3ClassDef
{
AVal cd_name;
char cd_externalizable;
char cd_dynamic;
int cd_num;
AVal *cd_props;
} AMF3ClassDef;
void AMF3CD_AddProp(AMF3ClassDef * cd, AVal * prop);
AVal *AMF3CD_GetProp(AMF3ClassDef * cd, int idx);
#ifdef __cplusplus
}
#endif
#endif /* __AMF_H__ */

View File

@@ -0,0 +1,91 @@
/*
* Copyright (C) 2005-2008 Team XBMC
* http://www.xbmc.org
* Copyright (C) 2008-2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#ifndef __BYTES_H__
#define __BYTES_H__

/*
 * Endianness detection for librtmp.  Normalizes the platform's BYTE_ORDER
 * macros to the double-underscore forms (__BYTE_ORDER, __LITTLE_ENDIAN,
 * __BIG_ENDIAN, __FLOAT_WORD_ORDER) that the rest of the library uses.
 */

#include <stdint.h>

#ifdef _WIN32
/* Windows is little endian only */
#define __LITTLE_ENDIAN 1234
#define __BIG_ENDIAN    4321
#define __BYTE_ORDER    __LITTLE_ENDIAN
#define __FLOAT_WORD_ORDER __BYTE_ORDER

typedef unsigned char uint8_t;

#else /* !_WIN32 */

#include <sys/param.h>

/* Map BSD-style (no-underscore) macros onto the GNU-style ones if needed. */
#if defined(BYTE_ORDER) && !defined(__BYTE_ORDER)
#define __BYTE_ORDER    BYTE_ORDER
#endif

#if defined(BIG_ENDIAN) && !defined(__BIG_ENDIAN)
#define __BIG_ENDIAN	BIG_ENDIAN
#endif

#if defined(LITTLE_ENDIAN) && !defined(__LITTLE_ENDIAN)
#define __LITTLE_ENDIAN	LITTLE_ENDIAN
#endif

#endif /* !_WIN32 */

/* define default endianness */
#ifndef __LITTLE_ENDIAN
#define __LITTLE_ENDIAN	1234
#endif

#ifndef __BIG_ENDIAN
#define __BIG_ENDIAN	4321
#endif

#ifndef __BYTE_ORDER
#warning "Byte order not defined on your system, assuming little endian!"
#define __BYTE_ORDER	__LITTLE_ENDIAN
#endif

/* ok, we assume to have the same float word order and byte order if float word order is not defined */
#ifndef __FLOAT_WORD_ORDER
#warning "Float word order not defined, assuming the same as byte order!"
#define __FLOAT_WORD_ORDER	__BYTE_ORDER
#endif

/* Fail the build outright rather than miscompile byte-swapping code. */
#if !defined(__BYTE_ORDER) || !defined(__FLOAT_WORD_ORDER)
#error "Undefined byte or float word order!"
#endif

#if __FLOAT_WORD_ORDER != __BIG_ENDIAN && __FLOAT_WORD_ORDER != __LITTLE_ENDIAN
#error "Unknown/unsupported float word order!"
#endif

#if __BYTE_ORDER != __BIG_ENDIAN && __BYTE_ORDER != __LITTLE_ENDIAN
#error "Unknown/unsupported byte order!"
#endif

#endif

View File

@@ -0,0 +1,376 @@
/* RTMPDump - Diffie-Hellmann Key Exchange
* Copyright (C) 2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <limits.h>
/*
 * Crypto-backend abstraction for Diffie-Hellman: maps a common MP_* bignum
 * API and an MDH struct onto PolarSSL, GnuTLS(+GMP/nettle) or OpenSSL.
 *
 * CAUTION: several MP_* macros expand to MULTIPLE statements (e.g. MP_new,
 * MP_gethex, MP_getbin in the PolarSSL/GnuTLS branches).  They must only be
 * used as full statements, never inside an unbraced if/else or expression.
 */
#ifdef USE_POLARSSL
#include <polarssl/dhm.h>
typedef mpi * MP_t;
#define MP_new(m)	m = malloc(sizeof(mpi)); mpi_init(m)
#define MP_set_w(mpi, w)	mpi_lset(mpi, w)
#define MP_cmp(u, v)	mpi_cmp_mpi(u, v)
#define MP_set(u, v)	mpi_copy(u, v)
#define MP_sub_w(mpi, w)	mpi_sub_int(mpi, mpi, w)
#define MP_cmp_1(mpi)	mpi_cmp_int(mpi, 1)
#define MP_modexp(r, y, q, p)	mpi_exp_mod(r, y, q, p, NULL)
#define MP_free(mpi)	mpi_free(mpi); free(mpi)
#define MP_gethex(u, hex, res)	MP_new(u); res = mpi_read_string(u, 16, hex) == 0
#define MP_bytes(u)	mpi_size(u)
#define MP_setbin(u,buf,len)	mpi_write_binary(u,buf,len)
#define MP_getbin(u,buf,len)	MP_new(u); mpi_read_binary(u,buf,len)

/* DH state: p/g/keys mirrored from the dhm_context for the common API. */
typedef struct MDH {
  MP_t p;
  MP_t g;
  MP_t pub_key;
  MP_t priv_key;
  long length;
  dhm_context ctx;
} MDH;

#define	MDH_new()	calloc(1,sizeof(MDH))
#define MDH_free(vp)	{MDH *_dh = vp; dhm_free(&_dh->ctx); MP_free(_dh->p); MP_free(_dh->g); MP_free(_dh->pub_key); MP_free(_dh->priv_key); free(_dh);}

/* Generate a DH keypair via PolarSSL; copies results into pub_key/priv_key.
 * Always returns 1 (dhm_make_public's status is not checked). */
static int MDH_generate_key(MDH *dh)
{
  unsigned char out[2];
  MP_set(&dh->ctx.P, dh->p);
  MP_set(&dh->ctx.G, dh->g);
  dh->ctx.len = 128;
  /* NOTE(review): RTMP_TLS_ctx is not declared in this header — it is
   * presumably the global TLS context from rtmp.c; confirm it is in scope
   * wherever this inline function is compiled with USE_POLARSSL. */
  dhm_make_public(&dh->ctx, 1024, out, 1, havege_random, &RTMP_TLS_ctx->hs);
  MP_new(dh->pub_key);
  MP_new(dh->priv_key);
  MP_set(dh->pub_key, &dh->ctx.GX);
  MP_set(dh->priv_key, &dh->ctx.X);
  return 1;
}

/* Derive the shared secret into 'secret'; returns 0 (errors not surfaced). */
static int MDH_compute_key(uint8_t *secret, size_t len, MP_t pub, MDH *dh)
{
  MP_set(&dh->ctx.GY, pub);
  dhm_calc_secret(&dh->ctx, secret, &len);
  return 0;
}

#elif defined(USE_GNUTLS)
#include <gmp.h>
#include <nettle/bignum.h>
#include <gnutls/crypto.h>
typedef mpz_ptr MP_t;
#define MP_new(m)	m = malloc(sizeof(*m)); mpz_init2(m, 1)
#define MP_set_w(mpi, w)	mpz_set_ui(mpi, w)
#define MP_cmp(u, v)	mpz_cmp(u, v)
#define MP_set(u, v)	mpz_set(u, v)
#define MP_sub_w(mpi, w)	mpz_sub_ui(mpi, mpi, w)
#define MP_cmp_1(mpi)	mpz_cmp_ui(mpi, 1)
#define MP_modexp(r, y, q, p)	mpz_powm(r, y, q, p)
#define MP_free(mpi)	mpz_clear(mpi); free(mpi)
#define MP_gethex(u, hex, res)	u = malloc(sizeof(*u)); mpz_init2(u, 1); res = (mpz_set_str(u, hex, 16) == 0)
#define MP_bytes(u)	(mpz_sizeinbase(u, 2) + 7) / 8
#define MP_setbin(u,buf,len)	nettle_mpz_get_str_256(len,buf,u)
#define MP_getbin(u,buf,len)	u = malloc(sizeof(*u)); mpz_init2(u, 1); nettle_mpz_set_str_256_u(u,len,buf)

typedef struct MDH {
  MP_t p;
  MP_t g;
  MP_t pub_key;
  MP_t priv_key;
  long length;
} MDH;

#define	MDH_new()	calloc(1,sizeof(MDH))
#define MDH_free(dh)	do {MP_free(((MDH*)(dh))->p); MP_free(((MDH*)(dh))->g); MP_free(((MDH*)(dh))->pub_key); MP_free(((MDH*)(dh))->priv_key); free(dh);} while(0)

/* Generate a DH keypair with GMP, seeding a Mersenne-Twister PRNG from
 * gnutls_rnd.  Returns 1 on success, 0 on allocation failure. */
static int MDH_generate_key(MDH *dh)
{
  int num_bytes;
  uint32_t seed;
  gmp_randstate_t rs;

  num_bytes = (mpz_sizeinbase(dh->p, 2) + 7) / 8 - 1;
  if (num_bytes <= 0 || num_bytes > 18000)
    return 0;

  dh->priv_key = calloc(1, sizeof(*dh->priv_key));
  if (!dh->priv_key)
    return 0;
  mpz_init2(dh->priv_key, 1);
  gnutls_rnd(GNUTLS_RND_RANDOM, &seed, sizeof(seed));
  gmp_randinit_mt(rs);
  gmp_randseed_ui(rs, seed);
  /* NOTE(review): mpz_urandomb's third argument is a BIT count, but
   * num_bytes is a byte count — the private key may be far shorter than
   * intended; verify against upstream rtmpdump before relying on this. */
  mpz_urandomb(dh->priv_key, rs, num_bytes);
  gmp_randclear(rs);

  dh->pub_key = calloc(1, sizeof(*dh->pub_key));
  if (!dh->pub_key)
    return 0;
  mpz_init2(dh->pub_key, 1);
  /* NOTE(review): this second !dh->pub_key check is dead code — the pointer
   * was already checked above and mpz_init2 does not modify it. */
  if (!dh->pub_key) {
    mpz_clear(dh->priv_key);
    free(dh->priv_key);
    return 0;
  }

  mpz_powm(dh->pub_key, dh->g, dh->priv_key, dh->p);
  return 1;
}

/* Compute secret = pub^priv mod p, written big-endian into 'secret'.
 * Returns len (like OpenSSL's DH_compute_key) or -1 on error. */
static int MDH_compute_key(uint8_t *secret, size_t len, MP_t pub, MDH *dh)
{
  mpz_ptr k;
  int num_bytes;

  num_bytes = (mpz_sizeinbase(dh->p, 2) + 7) / 8;
  if (num_bytes <= 0 || num_bytes > 18000)
    return -1;

  k = calloc(1, sizeof(*k));
  if (!k)
    return -1;
  mpz_init2(k, 1);

  mpz_powm(k, pub, dh->priv_key, dh->p);
  nettle_mpz_get_str_256(len, secret, k);
  mpz_clear(k);
  free(k);
  /* return the length of the shared secret key like DH_compute_key */
  return len;
}

#else /* USE_OPENSSL */
#include <openssl/bn.h>
#include <openssl/dh.h>
typedef BIGNUM * MP_t;
#define MP_new(m)	m = BN_new()
#define MP_set_w(mpi, w)	BN_set_word(mpi, w)
#define MP_cmp(u, v)	BN_cmp(u, v)
#define MP_set(u, v)	BN_copy(u, v)
#define MP_sub_w(mpi, w)	BN_sub_word(mpi, w)
#define MP_cmp_1(mpi)	BN_cmp(mpi, BN_value_one())
#define MP_modexp(r, y, q, p)	do {BN_CTX *ctx = BN_CTX_new(); BN_mod_exp(r, y, q, p, ctx); BN_CTX_free(ctx);} while(0)
#define MP_free(mpi)	BN_free(mpi)
#define MP_gethex(u, hex, res)	res = BN_hex2bn(&u, hex)
#define MP_bytes(u)	BN_num_bytes(u)
#define MP_setbin(u,buf,len)	BN_bn2bin(u,buf)
#define MP_getbin(u,buf,len)	u = BN_bin2bn(buf,len,0)

/* OpenSSL already provides the needed DH type and operations directly. */
#define MDH	DH
#define MDH_new()	DH_new()
#define MDH_free(dh)	DH_free(dh)
#define MDH_generate_key(dh)	DH_generate_key(dh)
#define MDH_compute_key(secret, seclen, pub, dh)	DH_compute_key(secret, pub, dh)

#endif
#include "log.h"
#include "dhgroups.h"
/* RFC 2631, Section 2.1.5, http://www.ietf.org/rfc/rfc2631.txt */
/*
 * Sanity-check a received DH public key y against prime p and, optionally,
 * the Sophie-Germain subgroup order q.  Returns TRUE if the key looks
 * valid, FALSE otherwise.
 */
static int
isValidPublicKey(MP_t y, MP_t p, MP_t q)
{
  int ret = TRUE;
  MP_t bn;

  assert(y);

  MP_new(bn);
  assert(bn);

  /* y must lie in [2,p-1] */
  /* NOTE(review): the code actually enforces the looser range [1, p-1]
   * (bn is set to 1 here and to p-1 below), while the log messages below
   * speak of 2 and p-2 — comments/messages and checks disagree upstream. */
  MP_set_w(bn, 1);
  if (MP_cmp(y, bn) < 0)
    {
      RTMP_Log(RTMP_LOGERROR, "DH public key must be at least 2");
      ret = FALSE;
      goto failed;
    }

  /* bn = p-1 (despite the "at most p-2" message below) */
  MP_set(bn, p);
  MP_sub_w(bn, 1);
  if (MP_cmp(y, bn) > 0)
    {
      RTMP_Log(RTMP_LOGERROR, "DH public key must be at most p-2");
      ret = FALSE;
      goto failed;
    }

  /* Verify with Sophie-Germain prime
   *
   * This is a nice test to make sure the public key position is calculated
   * correctly. This test will fail in about 50% of the cases if applied to
   * random data.
   */
  if (q)
    {
      /* y must fulfill y^q mod p = 1 */
      MP_modexp(bn, y, q, p);

      if (MP_cmp_1(bn) != 0)
	{
	  /* Deliberately only a warning: does not set ret = FALSE. */
	  RTMP_Log(RTMP_LOGWARNING, "DH public key does not fulfill y^q mod p = 1");
	}
    }

failed:
  MP_free(bn);
  return ret;
}
/*
 * Allocate and initialize a DH context using the well-known 1024-bit prime
 * P1024 (dhgroups.h) with generator 2.  nKeyBits is stored as the desired
 * private-key length.  Returns the new MDH* or 0 on failure.
 */
static MDH *
DHInit(int nKeyBits)
{
  size_t res;
  MDH *dh = MDH_new();

  if (!dh)
    goto failed;

  MP_new(dh->g);

  if (!dh->g)
    goto failed;

  /* MP_gethex both allocates dh->p and parses the hex string (multi-statement
   * macro on some backends) — keep it as a standalone statement. */
  MP_gethex(dh->p, P1024, res);	/* prime P1024, see dhgroups.h */
  if (!res)
    {
      goto failed;
    }

  MP_set_w(dh->g, 2);	/* base 2 */

  dh->length = nKeyBits;
  return dh;

failed:
  if (dh)
    MDH_free(dh);

  return 0;
}
/*
 * Generate a DH keypair for dh, retrying until the resulting public key
 * passes isValidPublicKey() against subgroup order Q1024.  Returns 1 on
 * success, 0 on failure (null dh or backend key generation failure).
 */
static int
DHGenerateKey(MDH *dh)
{
  size_t res = 0;
  if (!dh)
    return 0;

  while (!res)
    {
      MP_t q1 = NULL;

      if (!MDH_generate_key(dh))
	return 0;

      /* Allocates and parses q1; res is reused as the parse-success flag
       * and then as the validity flag that controls the loop. */
      MP_gethex(q1, Q1024, res);
      assert(res);

      res = isValidPublicKey(dh->pub_key, dh->p, q1);
      if (!res)
	{
	  /* Invalid key: discard and loop to generate a fresh pair. */
	  MP_free(dh->pub_key);
	  MP_free(dh->priv_key);
	  dh->pub_key = dh->priv_key = 0;
	}

      MP_free(q1);
    }
  return 1;
}
/* fill pubkey with the public key in BIG ENDIAN order
 * 00 00 00 00 00 x1 x2 x3 .....
 */
/*
 * Export dh's public key into the caller-supplied buffer, right-aligned and
 * zero-padded on the left to nPubkeyLen bytes.  Returns 1 on success, 0 if
 * dh/key is missing or the key does not fit.
 */
static int
DHGetPublicKey(MDH *dh, uint8_t *pubkey, size_t nPubkeyLen)
{
  int len;
  if (!dh || !dh->pub_key)
    return 0;

  len = MP_bytes(dh->pub_key);
  if (len <= 0 || len > (int) nPubkeyLen)
    return 0;

  memset(pubkey, 0, nPubkeyLen);
  MP_setbin(dh->pub_key, pubkey + (nPubkeyLen - len), len);
  return 1;
}

#if 0	/* unused */
/* Same as DHGetPublicKey but for the private key; kept for reference. */
static int
DHGetPrivateKey(MDH *dh, uint8_t *privkey, size_t nPrivkeyLen)
{
  if (!dh || !dh->priv_key)
    return 0;

  int len = MP_bytes(dh->priv_key);
  if (len <= 0 || len > (int) nPrivkeyLen)
    return 0;

  memset(privkey, 0, nPrivkeyLen);
  MP_setbin(dh->priv_key, privkey + (nPrivkeyLen - len), len);
  return 1;
}
#endif
/* computes the shared secret key from the private MDH value and the
 * other party's public key (pubkey)
 */
/*
 * Validate the peer's public key (nPubkeyLen big-endian bytes) and, if it
 * passes isValidPublicKey(), write the shared secret into 'secret'.
 * Returns the backend's MDH_compute_key result (length of the secret for
 * the OpenSSL/GnuTLS paths) or -1 on error.
 */
static int
DHComputeSharedSecretKey(MDH *dh, uint8_t *pubkey, size_t nPubkeyLen,
			 uint8_t *secret)
{
  MP_t q1 = NULL, pubkeyBn = NULL;
  size_t len;
  int res;

  if (!dh || !secret || nPubkeyLen >= INT_MAX)
    return -1;

  /* Multi-statement macros on some backends — keep as standalone statements. */
  MP_getbin(pubkeyBn, pubkey, nPubkeyLen);
  if (!pubkeyBn)
    return -1;

  MP_gethex(q1, Q1024, len);
  assert(len);

  if (isValidPublicKey(pubkeyBn, dh->p, q1))
    res = MDH_compute_key(secret, nPubkeyLen, pubkeyBn, dh);
  else
    res = -1;

  MP_free(q1);
  MP_free(pubkeyBn);

  return res;
}

View File

@@ -0,0 +1,199 @@
/* librtmp - Diffie-Hellmann Key Exchange
* Copyright (C) 2009 Andrej Stepanchuk
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
/* from RFC 3526, see http://www.ietf.org/rfc/rfc3526.txt */
/* 2^768 - 2^704 - 1 + 2^64 * { [2^638 pi] + 149686 } */
#define P768 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A63A3620FFFFFFFFFFFFFFFF"
/* 2^1024 - 2^960 - 1 + 2^64 * { [2^894 pi] + 129093 } */
#define P1024 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381" \
"FFFFFFFFFFFFFFFF"
/* Group order largest prime factor: */
#define Q1024 \
"7FFFFFFFFFFFFFFFE487ED5110B4611A62633145C06E0E68" \
"948127044533E63A0105DF531D89CD9128A5043CC71A026E" \
"F7CA8CD9E69D218D98158536F92F8A1BA7F09AB6B6A8E122" \
"F242DABB312F3F637A262174D31BF6B585FFAE5B7A035BF6" \
"F71C35FDAD44CFD2D74F9208BE258FF324943328F67329C0" \
"FFFFFFFFFFFFFFFF"
/* 2^1536 - 2^1472 - 1 + 2^64 * { [2^1406 pi] + 741804 } */
#define P1536 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA237327FFFFFFFFFFFFFFFF"
/* 2^2048 - 2^1984 - 1 + 2^64 * { [2^1918 pi] + 124476 } */
#define P2048 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AACAA68FFFFFFFFFFFFFFFF"
/* 2^3072 - 2^3008 - 1 + 2^64 * { [2^2942 pi] + 1690314 } */
#define P3072 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64" \
"ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7" \
"ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6B" \
"F12FFA06D98A0864D87602733EC86A64521F2B18177B200C" \
"BBE117577A615D6C770988C0BAD946E208E24FA074E5AB31" \
"43DB5BFCE0FD108E4B82D120A93AD2CAFFFFFFFFFFFFFFFF"
/* 2^4096 - 2^4032 - 1 + 2^64 * { [2^3966 pi] + 240904 } */
#define P4096 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64" \
"ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7" \
"ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6B" \
"F12FFA06D98A0864D87602733EC86A64521F2B18177B200C" \
"BBE117577A615D6C770988C0BAD946E208E24FA074E5AB31" \
"43DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D7" \
"88719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA" \
"2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6" \
"287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED" \
"1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA9" \
"93B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934063199" \
"FFFFFFFFFFFFFFFF"
/* 2^6144 - 2^6080 - 1 + 2^64 * { [2^6014 pi] + 929484 } */
#define P6144 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64" \
"ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7" \
"ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6B" \
"F12FFA06D98A0864D87602733EC86A64521F2B18177B200C" \
"BBE117577A615D6C770988C0BAD946E208E24FA074E5AB31" \
"43DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D7" \
"88719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA" \
"2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6" \
"287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED" \
"1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA9" \
"93B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934028492" \
"36C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BD" \
"F8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831" \
"179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1B" \
"DB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF" \
"5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6" \
"D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F3" \
"23A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AA" \
"CC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE328" \
"06A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55C" \
"DA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE" \
"12BF2D5B0B7474D6E694F91E6DCC4024FFFFFFFFFFFFFFFF"
/* 2^8192 - 2^8128 - 1 + 2^64 * { [2^8062 pi] + 4743158 } */
#define P8192 \
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD1" \
"29024E088A67CC74020BBEA63B139B22514A08798E3404DD" \
"EF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245" \
"E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7ED" \
"EE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3D" \
"C2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F" \
"83655D23DCA3AD961C62F356208552BB9ED529077096966D" \
"670C354E4ABC9804F1746C08CA18217C32905E462E36CE3B" \
"E39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9" \
"DE2BCBF6955817183995497CEA956AE515D2261898FA0510" \
"15728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64" \
"ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7" \
"ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6B" \
"F12FFA06D98A0864D87602733EC86A64521F2B18177B200C" \
"BBE117577A615D6C770988C0BAD946E208E24FA074E5AB31" \
"43DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D7" \
"88719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA" \
"2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6" \
"287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED" \
"1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA9" \
"93B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934028492" \
"36C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BD" \
"F8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831" \
"179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1B" \
"DB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF" \
"5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6" \
"D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F3" \
"23A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AA" \
"CC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE328" \
"06A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55C" \
"DA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE" \
"12BF2D5B0B7474D6E694F91E6DBE115974A3926F12FEE5E4" \
"38777CB6A932DF8CD8BEC4D073B931BA3BC832B68D9DD300" \
"741FA7BF8AFC47ED2576F6936BA424663AAB639C5AE4F568" \
"3423B4742BF1C978238F16CBE39D652DE3FDB8BEFC848AD9" \
"22222E04A4037C0713EB57A81A23F0C73473FC646CEA306B" \
"4BCBC8862F8385DDFA9D4B7FA2C087E879683303ED5BDD3A" \
"062B3CF5B3A278A66D2A13F83F44F82DDF310EE074AB6A36" \
"4597E899A0255DC164F31CC50846851DF9AB48195DED7EA1" \
"B1D510BD7EE74D73FAF36BC31ECFA268359046F4EB879F92" \
"4009438B481C6CD7889A002ED5EE382BC9190DA6FC026E47" \
"9558E4475677E9AA9E3050E2765694DFC81F56E880B96E71" \
"60C980DD98EDD3DFFFFFFFFFFFFFFFFF"

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,665 @@
/*
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <time.h>
#include "rtmp_sys.h"
#include "log.h"
#include "http.h"
#ifdef CRYPTO
/*
 * HMAC-SHA256 compatibility layer: maps a common HMAC_setup/crunch/finish/
 * close API onto PolarSSL, GnuTLS(nettle) or OpenSSL.  Note these macros
 * are not expressions — use them only as full statements.
 */
#ifdef USE_POLARSSL
#include <polarssl/sha2.h>
#ifndef SHA256_DIGEST_LENGTH
#define SHA256_DIGEST_LENGTH	32
#endif
#define HMAC_CTX	sha2_context
#define HMAC_setup(ctx, key, len)	sha2_hmac_starts(&ctx, (unsigned char *)key, len, 0)
#define HMAC_crunch(ctx, buf, len)	sha2_hmac_update(&ctx, buf, len)
#define HMAC_finish(ctx, dig, dlen)	dlen = SHA256_DIGEST_LENGTH; sha2_hmac_finish(&ctx, dig)
#define HMAC_close(ctx)
#elif defined(USE_GNUTLS)
#include <nettle/hmac.h>
#ifndef SHA256_DIGEST_LENGTH
#define SHA256_DIGEST_LENGTH	32
#endif
#undef HMAC_CTX
#define HMAC_CTX	struct hmac_sha256_ctx
#define HMAC_setup(ctx, key, len)	hmac_sha256_set_key(&ctx, len, key)
#define HMAC_crunch(ctx, buf, len)	hmac_sha256_update(&ctx, len, buf)
#define HMAC_finish(ctx, dig, dlen)	dlen = SHA256_DIGEST_LENGTH; hmac_sha256_digest(&ctx, SHA256_DIGEST_LENGTH, dig)
#define HMAC_close(ctx)
#else	/* USE_OPENSSL */
#include <openssl/ssl.h>
#include <openssl/sha.h>
#include <openssl/hmac.h>
#include <openssl/rc4.h>
/* NOTE(review): HMAC_CTX_init/HMAC_CTX_cleanup were removed in OpenSSL 1.1;
 * this path assumes OpenSSL <= 1.0.x — confirm the linked version. */
#define HMAC_setup(ctx, key, len)	HMAC_CTX_init(&ctx); HMAC_Init_ex(&ctx, (unsigned char *)key, len, EVP_sha256(), 0)
#define HMAC_crunch(ctx, buf, len)	HMAC_Update(&ctx, (unsigned char *)buf, len)
#define HMAC_finish(ctx, dig, dlen)	HMAC_Final(&ctx, (unsigned char *)dig, &dlen);
#define HMAC_close(ctx)	HMAC_CTX_cleanup(&ctx)
#endif

/* TLS context shared with rtmp.c (used for https:// SWF fetches). */
extern void RTMP_TLS_Init();
extern TLS_CTX RTMP_TLS_ctx;

#include <zlib.h>

#endif /* CRYPTO */

/* User-Agent sent with SWF verification HTTP requests. */
#define	AGENT	"Mozilla/5.0"
/*
 * Minimal blocking HTTP/1.0 GET client, used for SWF verification.
 *
 * http - context: http->date may carry an If-Modified-Since value in;
 *        http->status and http->size are filled in on the way out.
 * url  - http:// or https:// URL (https only when built with CRYPTO).
 * cb   - callback invoked with each chunk of the response body.
 *
 * Returns an HTTPResult code; http->status holds the raw HTTP status.
 */
HTTPResult
HTTP_get(struct HTTP_ctx *http, const char *url, HTTP_read_callback *cb)
{
    char *host, *path;
    char *p1, *p2;
    char hbuf[256];
    int port = 80;
#ifdef CRYPTO
    int ssl = 0;
#endif
    int hlen, flen = 0;
    int rc, i;
    int len_known;
    HTTPResult ret = HTTPRES_OK;
    struct sockaddr_in sa;
    RTMPSockBuf sb = {0};

    http->status = -1;

    memset(&sa, 0, sizeof(struct sockaddr_in));
    sa.sin_family = AF_INET;

    /* we only handle http here */
    if (strncasecmp(url, "http", 4))
        return HTTPRES_BAD_REQUEST;

    if (url[4] == 's')
    {
#ifdef CRYPTO
        ssl = 1;
        port = 443;
        if (!RTMP_TLS_ctx)
            RTMP_TLS_Init();
#else
        return HTTPRES_BAD_REQUEST;
#endif
    }

    p1 = strchr(url + 4, ':');
    if (!p1 || strncmp(p1, "://", 3))
        return HTTPRES_BAD_REQUEST;

    host = p1 + 3;
    path = strchr(host, '/');
    if (!path)                      /* FIX: URL without a path would deref NULL below */
        return HTTPRES_BAD_REQUEST;
    hlen = path - host;
    if (hlen >= (int)sizeof(hbuf))  /* FIX: reject hostnames that would overflow hbuf */
        return HTTPRES_BAD_REQUEST;
    strncpy(hbuf, host, hlen);
    hbuf[hlen] = '\0';
    host = hbuf;

    /* strip an explicit :port from the host copy */
    p1 = strrchr(host, ':');
    if (p1)
    {
        *p1++ = '\0';
        port = atoi(p1);
    }

    /* try a numeric address first, then fall back to DNS */
    sa.sin_addr.s_addr = inet_addr(host);
    if (sa.sin_addr.s_addr == INADDR_NONE)
    {
        struct hostent *hp = gethostbyname(host);
        if (!hp || !hp->h_addr)
            return HTTPRES_LOST_CONNECTION;
        sa.sin_addr = *(struct in_addr *)hp->h_addr;
    }
    sa.sin_port = htons(port);
    sb.sb_socket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
    if (sb.sb_socket == -1)
        return HTTPRES_LOST_CONNECTION;
    /* NOTE(review): assumes the full request fits in sb.sb_buf — confirm
     * against the RTMPSockBuf buffer size */
    i = sprintf(sb.sb_buf,
                "GET %s HTTP/1.0\r\nUser-Agent: %s\r\nHost: %s\r\nReferer: %.*s\r\n",
                path, AGENT, host, (int)(path - url + 1), url);
    if (http->date[0])
        i += sprintf(sb.sb_buf + i, "If-Modified-Since: %s\r\n", http->date);
    i += sprintf(sb.sb_buf + i, "\r\n");

    if (connect
        (sb.sb_socket, (struct sockaddr *)&sa, sizeof(struct sockaddr)) < 0)
    {
        ret = HTTPRES_LOST_CONNECTION;
        goto leave;
    }
#ifdef CRYPTO
    if (ssl)
    {
#ifdef NO_SSL
        RTMP_Log(RTMP_LOGERROR, "%s, No SSL/TLS support", __FUNCTION__);
        ret = HTTPRES_BAD_REQUEST;
        goto leave;
#else
        TLS_client(RTMP_TLS_ctx, sb.sb_ssl);
        TLS_setfd(sb.sb_ssl, sb.sb_socket);
        if (TLS_connect(sb.sb_ssl) < 0)
        {
            RTMP_Log(RTMP_LOGERROR, "%s, TLS_Connect failed", __FUNCTION__);
            ret = HTTPRES_LOST_CONNECTION;
            goto leave;
        }
#endif
    }
#endif
    RTMPSockBuf_Send(&sb, sb.sb_buf, i);

    /* set timeout */
#define HTTP_TIMEOUT	5
    {
        SET_RCVTIMEO(tv, HTTP_TIMEOUT);
        if (setsockopt
            (sb.sb_socket, SOL_SOCKET, SO_RCVTIMEO, (char *)&tv, sizeof(tv)))
        {
            RTMP_Log(RTMP_LOGERROR, "%s, Setting socket timeout to %ds failed!",
                     __FUNCTION__, HTTP_TIMEOUT);
        }
    }

    sb.sb_size = 0;
    sb.sb_timedout = FALSE;
    if (RTMPSockBuf_Fill(&sb) < 1)
    {
        ret = HTTPRES_LOST_CONNECTION;
        goto leave;
    }
    if (strncmp(sb.sb_buf, "HTTP/1", 6))
    {
        ret = HTTPRES_BAD_REQUEST;
        goto leave;
    }

    p1 = strchr(sb.sb_buf, ' ');
    if (!p1)        /* FIX: malformed status line would pass NULL+1 to atoi */
    {
        ret = HTTPRES_BAD_REQUEST;
        goto leave;
    }
    rc = atoi(p1 + 1);
    http->status = rc;

    if (rc >= 300)
    {
        if (rc == 304)
        {
            ret = HTTPRES_OK_NOT_MODIFIED;
            goto leave;
        }
        else if (rc == 404)
            ret = HTTPRES_NOT_FOUND;
        else if (rc >= 500)
            ret = HTTPRES_SERVER_ERROR;
        else if (rc >= 400)
            ret = HTTPRES_BAD_REQUEST;
        else
            ret = HTTPRES_REDIRECTED;
    }

    /* skip the status line */
    p1 = memchr(sb.sb_buf, '\n', sb.sb_size);
    if (!p1)
    {
        ret = HTTPRES_BAD_REQUEST;
        goto leave;
    }
    sb.sb_start = p1 + 1;
    sb.sb_size -= sb.sb_start - sb.sb_buf;

    /* parse headers until the blank line that ends them */
    while ((p2 = memchr(sb.sb_start, '\r', sb.sb_size)))
    {
        if (*sb.sb_start == '\r')
        {
            sb.sb_start += 2;
            sb.sb_size -= 2;
            break;
        }
        else
            if (!strncasecmp
                (sb.sb_start, "Content-Length: ", sizeof("Content-Length: ") - 1))
            {
                flen = atoi(sb.sb_start + sizeof("Content-Length: ") - 1);
            }
        else
            if (!strncasecmp
                (sb.sb_start, "Last-Modified: ", sizeof("Last-Modified: ") - 1))
            {
                *p2 = '\0';
                strcpy(http->date, sb.sb_start + sizeof("Last-Modified: ") - 1);
            }
        p2 += 2;
        sb.sb_size -= p2 - sb.sb_start;
        sb.sb_start = p2;
        if (sb.sb_size < 1)
        {
            if (RTMPSockBuf_Fill(&sb) < 1)
            {
                ret = HTTPRES_LOST_CONNECTION;
                goto leave;
            }
        }
    }

    /* stream the body to the callback; stop at Content-Length if known */
    len_known = flen > 0;
    while ((!len_known || flen > 0) &&
           (sb.sb_size > 0 || RTMPSockBuf_Fill(&sb) > 0))
    {
        cb(sb.sb_start, 1, sb.sb_size, http->data);
        if (len_known)
            flen -= sb.sb_size;
        http->size += sb.sb_size;
        sb.sb_size = 0;
    }

    if (flen > 0)
        ret = HTTPRES_LOST_CONNECTION;

leave:
    RTMPSockBuf_Close(&sb);
    return ret;
}
#ifdef CRYPTO
#define CHUNK 16384
/* State shared by RTMP_HashSWF() and its HTTP body callback swfcrunch(). */
struct info
{
    z_stream *zs;	/* zlib stream used when the SWF is compressed ("CWS") */
    HMAC_CTX ctx;	/* running HMAC-SHA256 over the (decompressed) SWF */
    int first;		/* nonzero until the first body chunk has been seen */
    int zlib;		/* nonzero if the body after the 8-byte header is zlib data */
    int size;		/* decompressed byte count accumulated so far */
};
/*
 * HTTP_read_callback that feeds the downloaded SWF into the verification
 * HMAC. The 8-byte SWF header is hashed as-is; if the file is compressed
 * ("CWS" signature) the signature is patched to "FWS" and the remainder
 * is inflated before hashing, so the hash always covers the uncompressed
 * form. Always reports the full chunk as consumed.
 */
static size_t
swfcrunch(void *ptr, size_t size, size_t nmemb, void *stream)
{
    struct info *i = stream;
    char *p = ptr;
    size_t len = size * nmemb;

    if (i->first)
    {
        i->first = 0;
        /* compressed? */
        if (!strncmp(p, "CWS", 3))
        {
            /* hash it as if it were uncompressed */
            *p = 'F';
            i->zlib = 1;
        }
        /* NOTE(review): assumes the first chunk is at least 8 bytes;
         * len is size_t, so a shorter chunk would wrap — confirm callers */
        HMAC_crunch(i->ctx, (unsigned char *)p, 8);
        p += 8;
        len -= 8;
        i->size = 8;
    }

    if (i->zlib)
    {
        unsigned char out[CHUNK];
        i->zs->next_in = (unsigned char *)p;
        i->zs->avail_in = len;
        /* inflate in CHUNK-sized pieces, hashing each piece as it appears */
        do
        {
            i->zs->avail_out = CHUNK;
            i->zs->next_out = out;
            inflate(i->zs, Z_NO_FLUSH);
            len = CHUNK - i->zs->avail_out;
            i->size += len;
            HMAC_crunch(i->ctx, out, len);
        }
        while (i->zs->avail_out == 0);
    }
    else
    {
        i->size += len;
        HMAC_crunch(i->ctx, (unsigned char *)p, len);
    }
    return size * nmemb;
}
static int tzoff;
static int tzchecked;
#define JAN02_1980 318340800
static const char *monthtab[12] = { "Jan", "Feb", "Mar",
"Apr", "May", "Jun",
"Jul", "Aug", "Sep",
"Oct", "Nov", "Dec"
};
static const char *days[] =
{ "Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat" };
/* Parse an HTTP datestamp into Unix time */
static time_t
make_unix_time(char *s)
{
    struct tm time;
    int i, ysub = 1900, fmt = 0;
    char *month;
    char *n;
    time_t res;

    /* Two layouts are recognized:
     *   fmt=1: "Day, DD-MMM-YYYY HH:MM:SS GMT"   (HTTP date)
     *   fmt=0: "Day MMM DD HH:MM:SS YYYY"        (Unix ctime())
     * The input string is modified in place.
     */
    if (s[3] != ' ')
    {
        fmt = 1;
        if (s[3] != ',')
            ysub = 0;	/* long weekday name: year field is already absolute */
    }
    /* turn '-' and ':' separators into spaces for uniform strtol parsing */
    for (n = s; *n; ++n)
        if (*n == '-' || *n == ':')
            *n = ' ';
    time.tm_mon = 0;
    n = strchr(s, ' ');
    if (fmt)
    {
        /* Day, DD-MMM-YYYY HH:MM:SS GMT */
        time.tm_mday = strtol(n + 1, &n, 0);
        month = n + 1;
        n = strchr(month, ' ');
        time.tm_year = strtol(n + 1, &n, 0);
        time.tm_hour = strtol(n + 1, &n, 0);
        time.tm_min = strtol(n + 1, &n, 0);
        time.tm_sec = strtol(n + 1, NULL, 0);
    }
    else
    {
        /* Unix ctime() format. Does not conform to HTTP spec. */
        /* Day MMM DD HH:MM:SS YYYY */
        month = n + 1;
        n = strchr(month, ' ');
        while (isspace(*n))
            n++;
        time.tm_mday = strtol(n, &n, 0);
        time.tm_hour = strtol(n + 1, &n, 0);
        time.tm_min = strtol(n + 1, &n, 0);
        time.tm_sec = strtol(n + 1, &n, 0);
        time.tm_year = strtol(n + 1, NULL, 0);
    }
    if (time.tm_year > 100)
        time.tm_year -= ysub;	/* tm_year counts from 1900 */

    /* resolve the month abbreviation; defaults to January if no match */
    for (i = 0; i < 12; i++)
        if (!strncasecmp(month, monthtab[i], 3))
        {
            time.tm_mon = i;
            break;
        }

    time.tm_isdst = 0;	/* daylight saving is never in effect in GMT */

    /* this is normally the value of extern int timezone, but some
     * braindead C libraries don't provide it.
     */
    if (!tzchecked)
    {
        struct tm *tc;
        time_t then = JAN02_1980;	/* noon GMT reference point */
        tc = localtime(&then);
        tzoff = (12 - tc->tm_hour) * 3600 + tc->tm_min * 60 + tc->tm_sec;
        tzchecked = 1;
    }
    res = mktime(&time);
    /* Unfortunately, mktime() assumes the input is in local time,
     * not GMT, so we have to correct it here.
     */
    if (res != -1)
        res += tzoff;
    return res;
}
/* Convert a Unix time to a network time string
* Weekday, DD-MMM-YYYY HH:MM:SS GMT
*/
/* Render *t as an HTTP-style timestamp, "Weekday, DD MMM YYYY HH:MM:SS GMT",
 * into the caller-supplied buffer s. Uses the fixed English name tables so
 * the output is locale-independent. */
static void
strtime(time_t * t, char *s)
{
    struct tm *utc = gmtime((time_t *) t);

    sprintf(s, "%s, %02d %s %d %02d:%02d:%02d GMT",
            days[utc->tm_wday], utc->tm_mday, monthtab[utc->tm_mon],
            1900 + utc->tm_year, utc->tm_hour, utc->tm_min, utc->tm_sec);
}
#define HEX2BIN(a) (((a)&0x40)?((a)&0xf)+9:((a)&0xf))
/*
 * Fetch the SWF at url and compute its SWF-Verification data: the
 * HMAC-SHA256 hash (key "Genuine Adobe Flash Player 001") and the
 * decompressed size. Results are cached in $HOME/.swfinfo and reused
 * for up to `age` days (age 0 forces a re-check; a 304 Not Modified
 * response keeps the cached values).
 *
 * url  - SWF URL to hash
 * size - out: decompressed SWF size
 * hash - out: SHA256_DIGEST_LENGTH-byte verification hash
 * age  - maximum cache age in days before re-checking
 *
 * Returns 0 on success, -1 on failure.
 */
int
RTMP_HashSWF(const char *url, unsigned int *size, unsigned char *hash,
             int age)
{
    FILE *f = NULL;
    char *path, date[64], cctim[64];
    long pos = 0;
    time_t ctim = -1, cnow;
    int i, got = 0, ret = 0;
    unsigned int hlen;
    struct info in = { 0 };
    struct HTTP_ctx http = { 0 };
    HTTPResult httpres;
    z_stream zs = { 0 };
    AVal home, hpre;

    date[0] = '\0';
    /* locate the user's home directory for the .swfinfo cache file */
#ifdef _WIN32
#ifdef XBMC4XBOX
    hpre.av_val = "Q:";
    hpre.av_len = 2;
    home.av_val = "\\UserData";
#else
    hpre.av_val = getenv("HOMEDRIVE");
    hpre.av_len = strlen(hpre.av_val);
    home.av_val = getenv("HOMEPATH");
#endif
#define DIRSEP "\\"

#else /* !_WIN32 */
    hpre.av_val = "";
    hpre.av_len = 0;
    home.av_val = getenv("HOME");
#define DIRSEP "/"
#endif
    if (!home.av_val)
        home.av_val = ".";
    home.av_len = strlen(home.av_val);

    /* SWF hash info is cached in a fixed-format file.
     * url: <url of SWF file>
     * ctim: HTTP datestamp of when we last checked it.
     * date: HTTP datestamp of the SWF's last modification.
     * size: SWF size in hex
     * hash: SWF hash in hex
     *
     * These fields must be present in this order. All fields
     * besides URL are fixed size.
     */
    path = malloc(hpre.av_len + home.av_len + sizeof(DIRSEP ".swfinfo"));
    sprintf(path, "%s%s" DIRSEP ".swfinfo", hpre.av_val, home.av_val);

    f = fopen(path, "r+");
    /* single-pass block, not a real loop: every path ends in break */
    while (f)
    {
        char buf[4096], *file, *p;

        /* split url into host prefix (length hlen) and basename (file) */
        file = strchr(url, '/');
        if (!file)
            break;
        file += 2;
        file = strchr(file, '/');
        if (!file)
            break;
        file++;
        hlen = file - url;
        p = strrchr(file, '/');
        if (p)
            file = p;
        else
            file--;

        /* scan cache records for a matching host prefix and basename */
        while (fgets(buf, sizeof(buf), f))
        {
            char *r1;

            got = 0;

            if (strncmp(buf, "url: ", 5))
                continue;
            if (strncmp(buf + 5, url, hlen))
                continue;
            r1 = strrchr(buf, '/');
            i = strlen(r1);
            r1[--i] = '\0';	/* drop the trailing newline */
            if (strncmp(r1, file, i))
                continue;
            pos = ftell(f);	/* remember where the record's fields start */
            /* read the four fixed fields following the matching url line */
            while (got < 4 && fgets(buf, sizeof(buf), f))
            {
                if (!strncmp(buf, "size: ", 6))
                {
                    *size = strtol(buf + 6, NULL, 16);
                    got++;
                }
                else if (!strncmp(buf, "hash: ", 6))
                {
                    /* decode the hex digest back into raw bytes */
                    unsigned char *ptr = hash, *in = (unsigned char *)buf + 6;
                    int l = strlen((char *)in) - 1;
                    for (i = 0; i < l; i += 2)
                        *ptr++ = (HEX2BIN(in[i]) << 4) | HEX2BIN(in[i + 1]);
                    got++;
                }
                else if (!strncmp(buf, "date: ", 6))
                {
                    buf[strlen(buf) - 1] = '\0';
                    strncpy(date, buf + 6, sizeof(date));
                    got++;
                }
                else if (!strncmp(buf, "ctim: ", 6))
                {
                    buf[strlen(buf) - 1] = '\0';
                    ctim = make_unix_time(buf + 6);
                    got++;
                }
                else if (!strncmp(buf, "url: ", 5))
                    break;	/* hit the next record before finding all fields */
            }
            break;
        }
        break;
    }

    cnow = time(NULL);
    /* If we got a cache time, see if it's young enough to use directly */
    if (age && ctim > 0)
    {
        ctim = cnow - ctim;
        ctim /= 3600 * 24;	/* seconds to days */
        if (ctim < age)	/* ok, it's new enough */
            goto out;
    }

    in.first = 1;
    HMAC_setup(in.ctx, "Genuine Adobe Flash Player 001", 30);
    inflateInit(&zs);
    in.zs = &zs;

    http.date = date;	/* sends If-Modified-Since when a cached date exists */
    http.data = &in;

    httpres = HTTP_get(&http, url, swfcrunch);

    inflateEnd(&zs);

    if (httpres != HTTPRES_OK && httpres != HTTPRES_OK_NOT_MODIFIED)
    {
        ret = -1;
        if (httpres == HTTPRES_LOST_CONNECTION)
            RTMP_Log(RTMP_LOGERROR, "%s: connection lost while downloading swfurl %s",
                     __FUNCTION__, url);
        else if (httpres == HTTPRES_NOT_FOUND)
            RTMP_Log(RTMP_LOGERROR, "%s: swfurl %s not found", __FUNCTION__, url);
        else
            RTMP_Log(RTMP_LOGERROR, "%s: couldn't contact swfurl %s (HTTP error %d)",
                     __FUNCTION__, url, http.status);
    }
    else
    {
        /* rewrite the existing cache record in place, or append a new one */
        if (got && pos)
            fseek(f, pos, SEEK_SET);
        else
        {
            char *q;
            if (!f)
                f = fopen(path, "w");
            if (!f)
            {
                int err = errno;
                RTMP_Log(RTMP_LOGERROR,
                         "%s: couldn't open %s for writing, errno %d (%s)",
                         __FUNCTION__, path, err, strerror(err));
                ret = -1;
                goto out;
            }
            fseek(f, 0, SEEK_END);
            /* the cached url omits any query string */
            q = strchr(url, '?');
            if (q)
                i = q - url;
            else
                i = strlen(url);

            fprintf(f, "url: %.*s\n", i, url);
        }
        strtime(&cnow, cctim);
        fprintf(f, "ctim: %s\n", cctim);

        /* in.first is cleared by swfcrunch() once body data arrived; a 304
         * response leaves it set, so the cached hash/size remain valid */
        if (!in.first)
        {
            HMAC_finish(in.ctx, hash, hlen);
            *size = in.size;

            fprintf(f, "date: %s\n", date);
            fprintf(f, "size: %08x\n", in.size);
            fprintf(f, "hash: ");
            for (i = 0; i < SHA256_DIGEST_LENGTH; i++)
                fprintf(f, "%02x", hash[i]);
            fprintf(f, "\n");
        }
    }
    HMAC_close(in.ctx);
out:
    free(path);
    if (f)
        fclose(f);
    return ret;
}
#else
/* Stub used when librtmp is built without CRYPTO support: SWF
 * verification is unavailable, so always report failure. */
int
RTMP_HashSWF(const char *url, unsigned int *size, unsigned char *hash,
             int age)
{
    return -1;
}
#endif

View File

@@ -0,0 +1,47 @@
#ifndef __RTMP_HTTP_H__
#define __RTMP_HTTP_H__
/*
* Copyright (C) 2010 Howard Chu
* Copyright (C) 2010 Antti Ajanki
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
 * along with librtmp; see the file COPYING.  If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
typedef enum {
HTTPRES_OK, /* result OK */
HTTPRES_OK_NOT_MODIFIED, /* not modified since last request */
HTTPRES_NOT_FOUND, /* not found */
HTTPRES_BAD_REQUEST, /* client error */
HTTPRES_SERVER_ERROR, /* server reported an error */
HTTPRES_REDIRECTED, /* resource has been moved */
HTTPRES_LOST_CONNECTION /* connection lost while waiting for data */
} HTTPResult;
/* Request/response context for HTTP_get(). */
struct HTTP_ctx {
    char *date;		/* in/out: If-Modified-Since value sent if non-empty;
			 * updated from the Last-Modified response header */
    int size;		/* out: total body bytes delivered to the callback */
    int status;		/* out: raw HTTP status code, -1 before any response */
    void *data;		/* opaque pointer passed through to the read callback */
};
typedef size_t (HTTP_read_callback)(void *ptr, size_t size, size_t nmemb, void *stream);
HTTPResult HTTP_get(struct HTTP_ctx *http, const char *url, HTTP_read_callback *cb);
#endif

View File

@@ -0,0 +1,210 @@
.TH LIBRTMP 3 "2011-07-20" "RTMPDump v2.4"
.\" Copyright 2011 Howard Chu.
.\" Copying permitted according to the GNU General Public License V2.
.SH NAME
librtmp \- RTMPDump Real-Time Messaging Protocol API
.SH LIBRARY
RTMPDump RTMP (librtmp, -lrtmp)
.SH SYNOPSIS
.B #include <librtmp/rtmp.h>
.SH DESCRIPTION
The Real-Time Messaging Protocol (RTMP) is used for streaming
multimedia content across a TCP/IP network. This API provides most client
functions and a few server functions needed to support RTMP, RTMP tunneled
in HTTP (RTMPT), encrypted RTMP (RTMPE), RTMP over SSL/TLS (RTMPS) and
tunneled variants of these encrypted types (RTMPTE, RTMPTS). The basic
RTMP specification has been published by Adobe but this API was
reverse-engineered without use of the Adobe specification. As such, it may
deviate from any published specifications but it usually duplicates the
actual behavior of the original Adobe clients.
The RTMPDump software package includes a basic client utility program
in
.BR rtmpdump (1),
some sample servers, and a library used to provide programmatic access
to the RTMP protocol. This man page gives an overview of the RTMP
library routines. These routines are found in the -lrtmp library. Many
other routines are also available, but they are not documented yet.
The basic interaction is as follows. A session handle is created using
.BR RTMP_Alloc ()
and initialized using
.BR RTMP_Init ().
All session parameters are provided using
.BR RTMP_SetupURL ().
The network connection is established using
.BR RTMP_Connect (),
and then the RTMP session is established using
.BR RTMP_ConnectStream ().
The stream is read using
.BR RTMP_Read ().
A client can publish a stream by calling
.BR RTMP_EnableWrite ()
before the
.BR RTMP_Connect ()
call, and then using
.BR RTMP_Write ()
after the session is established.
While a stream is playing it may be paused and unpaused using
.BR RTMP_Pause ().
The stream playback position can be moved using
.BR RTMP_Seek ().
When
.BR RTMP_Read ()
returns 0 bytes, the stream is complete and may be closed using
.BR RTMP_Close ().
The session handle is freed using
.BR RTMP_Free ().
All data is transferred using FLV format. The basic session requires
an RTMP URL. The RTMP URL format is of the form
.nf
rtmp[t][e|s]://hostname[:port][/app[/playpath]]
.fi
Plain rtmp, as well as tunneled and encrypted sessions are supported.
Additional options may be specified by appending space-separated
key=value pairs to the URL. Special characters in values may need
to be escaped to prevent misinterpretation by the option parser.
The escape encoding uses a backslash followed by two hexadecimal digits
representing the ASCII value of the character. E.g., spaces must
be escaped as \fB\\20\fP and backslashes must be escaped as \fB\\5c\fP.
.SH OPTIONS
.SS "Network Parameters"
These options define how to connect to the media server.
.TP
.BI socks= host:port
Use the specified SOCKS4 proxy.
.SS "Connection Parameters"
These options define the content of the RTMP Connect request packet.
If correct values are not provided, the media server will reject the
connection attempt.
.TP
.BI app= name
Name of application to connect to on the RTMP server. Overrides
the app in the RTMP URL. Sometimes the librtmp URL parser cannot
determine the app name automatically, so it must be given explicitly
using this option.
.TP
.BI tcUrl= url
URL of the target stream. Defaults to rtmp[t][e|s]://host[:port]/app.
.TP
.BI pageUrl= url
URL of the web page in which the media was embedded. By default no
value will be sent.
.TP
.BI swfUrl= url
URL of the SWF player for the media. By default no value will be sent.
.TP
.BI flashVer= version
Version of the Flash plugin used to run the SWF player. The
default is "LNX 10,0,32,18".
.TP
.BI conn= type:data
Append arbitrary AMF data to the Connect message. The type
must be B for Boolean, N for number, S for string, O for object, or Z
for null. For Booleans the data must be either 0 or 1 for FALSE or TRUE,
respectively. Likewise for Objects the data must be 0 or 1 to end or
begin an object, respectively. Data items in subobjects may be named, by
prefixing the type with 'N' and specifying the name before the value, e.g.
NB:myFlag:1. This option may be used multiple times to construct arbitrary
AMF sequences. E.g.
.nf
conn=B:1 conn=S:authMe conn=O:1 conn=NN:code:1.23 conn=NS:flag:ok conn=O:0
.fi
.SS "Session Parameters"
These options take effect after the Connect request has succeeded.
.TP
.BI playpath= path
Overrides the playpath parsed from the RTMP URL. Sometimes the
rtmpdump URL parser cannot determine the correct playpath
automatically, so it must be given explicitly using this option.
.TP
.BI playlist= 0|1
If the value is 1 or TRUE, issue a set_playlist command before sending the
play command. The playlist will just contain the current playpath. If the
value is 0 or FALSE, the set_playlist command will not be sent. The
default is FALSE.
.TP
.BI live= 0|1
Specify that the media is a live stream. No resuming or seeking in
live streams is possible.
.TP
.BI subscribe= path
Name of live stream to subscribe to. Defaults to
.IR playpath .
.TP
.BI start= num
Start at
.I num
seconds into the stream. Not valid for live streams.
.TP
.BI stop= num
Stop at
.I num
seconds into the stream.
.TP
.BI buffer= num
Set buffer time to
.I num
milliseconds. The default is 30000.
.TP
.BI timeout= num
Timeout the session after
.I num
seconds without receiving any data from the server. The default is 120.
.SS "Security Parameters"
These options handle additional authentication requests from the server.
.TP
.BI token= key
Key for SecureToken response, used if the server requires SecureToken
authentication.
.TP
.BI jtv= JSON
JSON token used by legacy Justin.tv servers. Invokes NetStream.Authenticate.UsherToken
.TP
.BI swfVfy= 0|1
If the value is 1 or TRUE, the SWF player is retrieved from the
specified
.I swfUrl
for performing SWF Verification. The SWF hash and size (used in the
verification step) are computed automatically. Also the SWF information is
cached in a
.I .swfinfo
file in the user's home directory, so that it doesn't need to be retrieved
and recalculated every time. The .swfinfo file records
the SWF URL, the time it was fetched, the modification timestamp of the SWF
file, its size, and its hash. By default, the cached info will be used
for 30 days before re-checking.
.TP
.BI swfAge= days
Specify how many days to use the cached SWF info before re-checking. Use
0 to always check the SWF URL. Note that if the check shows that the
SWF file has the same modification timestamp as before, it will not be
retrieved again.
.SH EXAMPLES
An example character string suitable for use with
.BR RTMP_SetupURL ():
.nf
"rtmp://flashserver:1935/ondemand/thefile swfUrl=http://flashserver/player.swf swfVfy=1"
.fi
.SH ENVIRONMENT
.TP
.B HOME
The value of
.RB $ HOME
is used as the location for the
.I .swfinfo
file.
.SH FILES
.TP
.I $HOME/.swfinfo
Cache of SWF Verification information
.SH "SEE ALSO"
.BR rtmpdump (1),
.BR rtmpgw (8)
.SH AUTHORS
Andrej Stepanchuk, Howard Chu, The Flvstreamer Team
.br
<http://rtmpdump.mplayerhq.hu>

View File

@@ -0,0 +1,312 @@
<HTML>
<HEAD>
<title>LIBRTMP(3): </title></head>
<table>
<thead>
<tr><td>LIBRTMP(3)<td align="center"><td align="right">LIBRTMP(3)
</thead>
<tfoot>
<tr><td>RTMPDump v2.4<td align="center">2011-07-20<td align="right">LIBRTMP(3)
</tfoot>
<tbody><tr><td colspan="3"><br><br><ul>
<!-- Copyright 2011 Howard Chu.
Copying permitted according to the GNU General Public License V2.-->
</ul>
<h3>NAME</h3><ul>
librtmp &minus; RTMPDump Real-Time Messaging Protocol API
</ul>
<h3>LIBRARY</h3><ul>
RTMPDump RTMP (librtmp, -lrtmp)
</ul>
<h3>SYNOPSIS</h3><ul>
<b>#include &lt;librtmp/rtmp.h&gt;</b>
</ul>
<h3>DESCRIPTION</h3><ul>
The Real-Time Messaging Protocol (RTMP) is used for streaming
multimedia content across a TCP/IP network. This API provides most client
functions and a few server functions needed to support RTMP, RTMP tunneled
in HTTP (RTMPT), encrypted RTMP (RTMPE), RTMP over SSL/TLS (RTMPS) and
tunneled variants of these encrypted types (RTMPTE, RTMPTS). The basic
RTMP specification has been published by Adobe but this API was
reverse-engineered without use of the Adobe specification. As such, it may
deviate from any published specifications but it usually duplicates the
actual behavior of the original Adobe clients.
<p>
The RTMPDump software package includes a basic client utility program
in
<a href="../man1/rtmpdump.1"><b>rtmpdump</b></a>(1),
some sample servers, and a library used to provide programmatic access
to the RTMP protocol. This man page gives an overview of the RTMP
library routines. These routines are found in the -lrtmp library. Many
other routines are also available, but they are not documented yet.
<p>
The basic interaction is as follows. A session handle is created using
<b>RTMP_Alloc</b>()
and initialized using
<b>RTMP_Init</b>().
All session parameters are provided using
<b>RTMP_SetupURL</b>().
The network connection is established using
<b>RTMP_Connect</b>(),
and then the RTMP session is established using
<b>RTMP_ConnectStream</b>().
The stream is read using
<b>RTMP_Read</b>().
A client can publish a stream by calling
<b>RTMP_EnableWrite</b>()
before the
<b>RTMP_Connect</b>()
call, and then using
<b>RTMP_Write</b>()
after the session is established.
While a stream is playing it may be paused and unpaused using
<b>RTMP_Pause</b>().
The stream playback position can be moved using
<b>RTMP_Seek</b>().
When
<b>RTMP_Read</b>()
returns 0 bytes, the stream is complete and may be closed using
<b>RTMP_Close</b>().
The session handle is freed using
<b>RTMP_Free</b>().
<p>
All data is transferred using FLV format. The basic session requires
an RTMP URL. The RTMP URL format is of the form
<pre>
rtmp[t][e|s]://hostname[:port][/app[/playpath]]
</pre>
<p>
Plain rtmp, as well as tunneled and encrypted sessions are supported.
<p>
Additional options may be specified by appending space-separated
key=value pairs to the URL. Special characters in values may need
to be escaped to prevent misinterpretation by the option parser.
The escape encoding uses a backslash followed by two hexadecimal digits
representing the ASCII value of the character. E.g., spaces must
be escaped as <b>\20</b> and backslashes must be escaped as <b>\5c</b>.
</ul>
<h3>OPTIONS</h3><ul>
</ul>
<h4>Network Parameters</h4><ul>
These options define how to connect to the media server.
<p>
<dl compact><dt>
<b>socks=</b><i>host:port</i>
<dd>
Use the specified SOCKS4 proxy.
</dl>
</ul>
<h4>Connection Parameters</h4><ul>
These options define the content of the RTMP Connect request packet.
If correct values are not provided, the media server will reject the
connection attempt.
<p>
<dl compact><dt>
<b>app=</b><i>name</i>
<dd>
Name of application to connect to on the RTMP server. Overrides
the app in the RTMP URL. Sometimes the librtmp URL parser cannot
determine the app name automatically, so it must be given explicitly
using this option.
</dl>
<p>
<dl compact><dt>
<b>tcUrl=</b><i>url</i>
<dd>
URL of the target stream. Defaults to rtmp[t][e|s]://host[:port]/app.
</dl>
<p>
<dl compact><dt>
<b>pageUrl=</b><i>url</i>
<dd>
URL of the web page in which the media was embedded. By default no
value will be sent.
</dl>
<p>
<dl compact><dt>
<b>swfUrl=</b><i>url</i>
<dd>
URL of the SWF player for the media. By default no value will be sent.
</dl>
<p>
<dl compact><dt>
<b>flashVer=</b><i>version</i>
<dd>
Version of the Flash plugin used to run the SWF player. The
default is "LNX 10,0,32,18".
</dl>
<p>
<dl compact><dt>
<b>conn=</b><i>type:data</i>
<dd>
Append arbitrary AMF data to the Connect message. The type
must be B for Boolean, N for number, S for string, O for object, or Z
for null. For Booleans the data must be either 0 or 1 for FALSE or TRUE,
respectively. Likewise for Objects the data must be 0 or 1 to end or
begin an object, respectively. Data items in subobjects may be named, by
prefixing the type with 'N' and specifying the name before the value, e.g.
NB:myFlag:1. This option may be used multiple times to construct arbitrary
AMF sequences. E.g.
<pre>
conn=B:1 conn=S:authMe conn=O:1 conn=NN:code:1.23 conn=NS:flag:ok conn=O:0
</pre>
</dl>
</ul>
<h4>Session Parameters</h4><ul>
These options take effect after the Connect request has succeeded.
<p>
<dl compact><dt>
<b>playpath=</b><i>path</i>
<dd>
Overrides the playpath parsed from the RTMP URL. Sometimes the
rtmpdump URL parser cannot determine the correct playpath
automatically, so it must be given explicitly using this option.
</dl>
<p>
<dl compact><dt>
<b>playlist=</b><i>0|1</i>
<dd>
If the value is 1 or TRUE, issue a set_playlist command before sending the
play command. The playlist will just contain the current playpath. If the
value is 0 or FALSE, the set_playlist command will not be sent. The
default is FALSE.
</dl>
<p>
<dl compact><dt>
<b>live=</b><i>0|1</i>
<dd>
Specify that the media is a live stream. No resuming or seeking in
live streams is possible.
</dl>
<p>
<dl compact><dt>
<b>subscribe=</b><i>path</i>
<dd>
Name of live stream to subscribe to. Defaults to
<i>playpath</i>.
</dl>
<p>
<dl compact><dt>
<b>start=</b><i>num</i>
<dd>
Start at
<i>num</i>
seconds into the stream. Not valid for live streams.
</dl>
<p>
<dl compact><dt>
<b>stop=</b><i>num</i>
<dd>
Stop at
<i>num</i>
seconds into the stream.
</dl>
<p>
<dl compact><dt>
<b>buffer=</b><i>num</i>
<dd>
Set buffer time to
<i>num</i>
milliseconds. The default is 30000.
</dl>
<p>
<dl compact><dt>
<b>timeout=</b><i>num</i>
<dd>
Timeout the session after
<i>num</i>
seconds without receiving any data from the server. The default is 120.
</dl>
</ul>
<h4>Security Parameters</h4><ul>
These options handle additional authentication requests from the server.
<p>
<dl compact><dt>
<b>token=</b><i>key</i>
<dd>
Key for SecureToken response, used if the server requires SecureToken
authentication.
</dl>
<p>
<dl compact><dt>
<b>jtv=</b><i>JSON</i>
<dd>
JSON token used by legacy Justin.tv servers. Invokes NetStream.Authenticate.UsherToken
</dl>
<p>
<dl compact><dt>
<b>swfVfy=</b><i>0|1</i>
<dd>
If the value is 1 or TRUE, the SWF player is retrieved from the
specified
<i>swfUrl</i>
for performing SWF Verification. The SWF hash and size (used in the
verification step) are computed automatically. Also the SWF information is
cached in a
<i>.swfinfo</i>
file in the user's home directory, so that it doesn't need to be retrieved
and recalculated every time. The .swfinfo file records
the SWF URL, the time it was fetched, the modification timestamp of the SWF
file, its size, and its hash. By default, the cached info will be used
for 30 days before re-checking.
</dl>
<p>
<dl compact><dt>
<b>swfAge=</b><i>days</i>
<dd>
Specify how many days to use the cached SWF info before re-checking. Use
0 to always check the SWF URL. Note that if the check shows that the
SWF file has the same modification timestamp as before, it will not be
retrieved again.
</dl>
</ul>
<h3>EXAMPLES</h3><ul>
An example character string suitable for use with
<b>RTMP_SetupURL</b>():
<pre>
"rtmp://flashserver:1935/ondemand/thefile swfUrl=<a href="http://flashserver/player.swf">http://flashserver/player.swf</a> swfVfy=1"
</pre>
</ul>
<h3>ENVIRONMENT</h3><ul>
<p>
<dl compact><dt>
<b>HOME</b>
<dd>
The value of
$<b>HOME</b>
is used as the location for the
<i>.swfinfo</i>
file.
</dl>
</ul>
<h3>FILES</h3><ul>
<p>
<dl compact><dt>
<i>$HOME/.swfinfo</i>
<dd>
Cache of SWF Verification information
</dl>
</ul>
<h3>SEE ALSO</h3><ul>
<a href="../man1/rtmpdump.1"><b>rtmpdump</b></a>(1),
<a href="../man8/rtmpgw.8"><b>rtmpgw</b></a>(8)
</ul>
<h3>AUTHORS</h3><ul>
Andrej Stepanchuk, Howard Chu, The Flvstreamer Team
<br>
&lt;<a href="http://rtmpdump.mplayerhq.hu">http://rtmpdump.mplayerhq.hu</a>&gt;
</ul></tbody></table></html>

View File

@@ -0,0 +1,13 @@
prefix=@prefix@
exec_prefix=${prefix}
libdir=@libdir@
incdir=${prefix}/include
Name: librtmp
Description: RTMP implementation
Version: @VERSION@
Requires: @CRYPTO_REQ@
URL: http://rtmpdump.mplayerhq.hu
Libs: -L${libdir} -lrtmp -lz
Libs.private: @PRIVATE_LIBS@
Cflags: -I${incdir}

View File

@@ -0,0 +1,220 @@
/*
* Copyright (C) 2008-2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
 * along with librtmp; see the file COPYING.  If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#include <stdio.h>
#include <stdarg.h>
#include <string.h>
#include <assert.h>
#include <ctype.h>
#include "rtmp_sys.h"
#include "log.h"
#define MAX_PRINT_LEN 2048
RTMP_LogLevel RTMP_debuglevel = RTMP_LOGERROR;
static int neednl;
static FILE *fmsg;
static RTMP_LogCallback rtmp_log_default, *cb = rtmp_log_default;
static const char *levels[] = {
"CRIT", "ERROR", "WARNING", "INFO",
"DEBUG", "DEBUG2"
};
/* Default log sink: format the message and write it to fmsg (stderr unless
 * RTMP_LogSetOutput() was called), prefixed with the level name. Messages
 * above the current RTMP_debuglevel are dropped. */
static void rtmp_log_default(int level, const char *format, va_list vl)
{
	char str[MAX_PRINT_LEN]="";

	vsnprintf(str, MAX_PRINT_LEN-1, format, vl);

	/* Filter out 'no-name' */
	if ( RTMP_debuglevel<RTMP_LOGALL && strstr(str, "no-name" ) != NULL )
		return;

	if ( !fmsg ) fmsg = stderr;

	if ( level <= RTMP_debuglevel ) {
		if (neednl) {
			/* a prior RTMP_LogStatus() left the line unterminated */
			putc('\n', fmsg);
			neednl = 0;
		}
		fprintf(fmsg, "%s: %s\n", levels[level], str);
#ifdef _DEBUG
		fflush(fmsg);
#endif
	}
}
/* Direct all library log output to the given stream (stderr until set). */
void RTMP_LogSetOutput(FILE *file)
{
	fmsg = file;
}
/* Set the verbosity threshold; messages above this level are dropped. */
void RTMP_LogSetLevel(RTMP_LogLevel level)
{
	RTMP_debuglevel = level;
}
/* Replace the default log sink with a user-supplied callback. */
void RTMP_LogSetCallback(RTMP_LogCallback *cbp)
{
	cb = cbp;
}
/* Return the current verbosity threshold.
 * FIX: declare as (void) — an empty parameter list in a C function
 * definition means "unspecified arguments", not "no arguments". */
RTMP_LogLevel RTMP_LogGetLevel(void)
{
	return RTMP_debuglevel;
}
/* Public log entry point: forward the varargs message to the installed
 * sink (rtmp_log_default unless replaced via RTMP_LogSetCallback). */
void RTMP_Log(int level, const char *format, ...)
{
	va_list args;
	va_start(args, format);
	cb(level, format, args);
	va_end(args);
}
static const char hexdig[] = "0123456789abcdef";
/* Dump len bytes as space-separated hex pairs, 16 bytes per line,
 * emitted through RTMP_Log() at the given level. */
void RTMP_LogHex(int level, const uint8_t *data, unsigned long len)
{
	unsigned long i;
	char line[50], *ptr;

	if ( level > RTMP_debuglevel )
		return;

	ptr = line;

	for(i=0; i<len; i++) {
		*ptr++ = hexdig[0x0f & (data[i] >> 4)];
		*ptr++ = hexdig[0x0f & data[i]];
		if ((i & 0x0f) == 0x0f) {
			/* 16th byte on this line: terminate and flush it */
			*ptr = '\0';
			ptr = line;
			RTMP_Log(level, "%s", line);
		} else {
			*ptr++ = ' ';
		}
	}
	if (i & 0x0f) {
		/* flush the final partial line */
		*ptr = '\0';
		RTMP_Log(level, "%s", line);
	}
}
/* Dump a buffer in classic hexdump layout — offset column, 16 hex bytes,
 * and a printable-ASCII graph column — one RTMP_Log() call per line. */
void RTMP_LogHexString(int level, const uint8_t *data, unsigned long len)
{
#define BP_OFFSET 9
#define BP_GRAPH 60
#define BP_LEN	80
	char line[BP_LEN];
	unsigned long i;

	if ( !data || level > RTMP_debuglevel )
		return;

	/* in case len is zero */
	line[0] = '\0';

	for ( i = 0 ; i < len ; i++ ) {
		int n = i % 16;		/* column within the current line */
		unsigned off;

		if( !n ) {
			/* starting a new line: flush previous, re-blank, print offset */
			if( i ) RTMP_Log( level, "%s", line );
			memset( line, ' ', sizeof(line)-2 );
			line[sizeof(line)-2] = '\0';

			/* NOTE(review): '%' here looks like it was meant to be
			 * '& 0xffffU' (mask, not modulo) — display-only effect */
			off = i % 0x0ffffU;

			line[2] = hexdig[0x0f & (off >> 12)];
			line[3] = hexdig[0x0f & (off >> 8)];
			line[4] = hexdig[0x0f & (off >> 4)];
			line[5] = hexdig[0x0f & off];
			line[6] = ':';
		}

		off = BP_OFFSET + n*3 + ((n >= 8)?1:0);
		line[off] = hexdig[0x0f & ( data[i] >> 4 )];
		line[off+1] = hexdig[0x0f & data[i]];

		/* NOTE(review): this computed off (with the >=8 gap) is never
		 * used — the graph column below indexes BP_GRAPH + n directly */
		off = BP_GRAPH + n + ((n >= 8)?1:0);

		if ( isprint( data[i] )) {
			line[BP_GRAPH + n] = data[i];
		} else {
			line[BP_GRAPH + n] = '.';
		}
	}

	RTMP_Log( level, "%s", line );
}
/* These should only be used by apps, never by the library itself */
/* Print an app-level message to the log stream, first emitting the newline
 * owed by a prior RTMP_LogStatus() call. Suppressed entirely at the
 * RTMP_LOGCRIT level. Should only be used by apps, never by the library. */
void RTMP_LogPrintf(const char *format, ...)
{
	char str[MAX_PRINT_LEN]="";
	int len;
	va_list args;
	va_start(args, format);
	len = vsnprintf(str, MAX_PRINT_LEN-1, format, args);
	va_end(args);

	if ( RTMP_debuglevel==RTMP_LOGCRIT )
		return;

	if ( !fmsg ) fmsg = stderr;

	if (neednl) {
		putc('\n', fmsg);
		neednl = 0;
	}

	if (len > MAX_PRINT_LEN-1)
		len = MAX_PRINT_LEN-1;
	fprintf(fmsg, "%s", str);
	/* FIX: guard len > 0 — vsnprintf may return 0 (empty output) or a
	 * negative value on error, and str[len-1] would then read out of
	 * bounds of the buffer */
	if (len > 0 && str[len-1] == '\n')
		fflush(fmsg);
}
/* Print a transient status string with no trailing newline; sets neednl so
 * the next regular log message terminates the line first. Suppressed at
 * RTMP_LOGCRIT. App-level helper, not used by the library itself. */
void RTMP_LogStatus(const char *format, ...)
{
	char str[MAX_PRINT_LEN]="";
	va_list args;
	va_start(args, format);
	vsnprintf(str, MAX_PRINT_LEN-1, format, args);
	va_end(args);

	if ( RTMP_debuglevel==RTMP_LOGCRIT )
		return;

	if ( !fmsg ) fmsg = stderr;

	fprintf(fmsg, "%s", str);
	fflush(fmsg);
	neednl = 1;
}

View File

@@ -0,0 +1,69 @@
/*
* Copyright (C) 2008-2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#ifndef __RTMP_LOG_H__
#define __RTMP_LOG_H__
/*
 * librtmp logging interface: leveled logging with an optional
 * application-supplied callback and a redirectable output stream.
 */

#include <stdio.h>
#include <stdarg.h>
#include <stdint.h>

#ifdef __cplusplus
extern "C" {
#endif

/* Enable this to get full debugging output */
/* #define _DEBUG */

#ifdef _DEBUG
#undef NODEBUG
#endif

/* Severity levels, most to least severe; RTMP_LOGALL enables everything. */
typedef enum
{ RTMP_LOGCRIT=0, RTMP_LOGERROR, RTMP_LOGWARNING, RTMP_LOGINFO,
  RTMP_LOGDEBUG, RTMP_LOGDEBUG2, RTMP_LOGALL
} RTMP_LogLevel;

/* Current threshold: messages with a level above this are dropped. */
extern RTMP_LogLevel RTMP_debuglevel;

/* Optional sink replacing the default FILE*-based output. */
typedef void (RTMP_LogCallback)(int level, const char *fmt, va_list);
void RTMP_LogSetCallback(RTMP_LogCallback *cb);
void RTMP_LogSetOutput(FILE *file);

#ifdef __GNUC__
/* printf-format checking under GCC/Clang */
void RTMP_LogPrintf(const char *format, ...) __attribute__ ((__format__ (__printf__, 1, 2)));
void RTMP_LogStatus(const char *format, ...) __attribute__ ((__format__ (__printf__, 1, 2)));
void RTMP_Log(int level, const char *format, ...) __attribute__ ((__format__ (__printf__, 2, 3)));
#else
void RTMP_LogPrintf(const char *format, ...);
void RTMP_LogStatus(const char *format, ...);
void RTMP_Log(int level, const char *format, ...);
#endif

/* Hex dumps: LogHex emits plain pairs, LogHexString adds offset+ASCII columns. */
void RTMP_LogHex(int level, const uint8_t *data, unsigned long len);
void RTMP_LogHexString(int level, const uint8_t *data, unsigned long len);
void RTMP_LogSetLevel(RTMP_LogLevel lvl);
RTMP_LogLevel RTMP_LogGetLevel(void);

#ifdef __cplusplus
}
#endif
#endif

View File

@@ -0,0 +1,289 @@
/*
* Copyright (C) 2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <ctype.h>
#include "rtmp_sys.h"
#include "log.h"
/*
 * Parse an RTMP URL of the form
 *   protocol://host[:port]/app[/appinstance][/playpath][?params]
 * into its components. `host` and `app` point INTO the caller's url
 * string (not copied); `playpath` is allocated by RTMP_ParsePlaypath
 * and must be freed by the caller. *port stays 0 when the URL has none.
 * Returns TRUE on success, FALSE when "://" or the hostname is missing.
 */
int RTMP_ParseURL(const char *url, int *protocol, AVal *host, unsigned int *port,
	AVal *playpath, AVal *app)
{
	char *p, *end, *col, *ques, *slash;

	RTMP_Log(RTMP_LOGDEBUG, "Parsing...");

	*protocol = RTMP_PROTOCOL_RTMP;	/* default kept for unknown schemes */
	*port = 0;
	playpath->av_len = 0;
	playpath->av_val = NULL;
	app->av_len = 0;
	app->av_val = NULL;

	/* Old School Parsing */

	/* look for usual :// pattern */
	p = strstr(url, "://");
	if(!p) {
		RTMP_Log(RTMP_LOGERROR, "RTMP URL: No :// in url!");
		return FALSE;
	}
	{
		/* match the scheme by exact length + case-insensitive compare */
		int len = (int)(p-url);

		if(len == 4 && strncasecmp(url, "rtmp", 4)==0)
			*protocol = RTMP_PROTOCOL_RTMP;
		else if(len == 5 && strncasecmp(url, "rtmpt", 5)==0)
			*protocol = RTMP_PROTOCOL_RTMPT;
		else if(len == 5 && strncasecmp(url, "rtmps", 5)==0)
			*protocol = RTMP_PROTOCOL_RTMPS;
		else if(len == 5 && strncasecmp(url, "rtmpe", 5)==0)
			*protocol = RTMP_PROTOCOL_RTMPE;
		else if(len == 5 && strncasecmp(url, "rtmfp", 5)==0)
			*protocol = RTMP_PROTOCOL_RTMFP;
		else if(len == 6 && strncasecmp(url, "rtmpte", 6)==0)
			*protocol = RTMP_PROTOCOL_RTMPTE;
		else if(len == 6 && strncasecmp(url, "rtmpts", 6)==0)
			*protocol = RTMP_PROTOCOL_RTMPTS;
		else {
			RTMP_Log(RTMP_LOGWARNING, "Unknown protocol!\n");
			goto parsehost;	/* keep RTMP default and carry on */
		}
	}

	RTMP_Log(RTMP_LOGDEBUG, "Parsed protocol: %d", *protocol);

parsehost:
	/* let's get the hostname */
	p+=3;

	/* check for sudden death */
	if(*p==0) {
		RTMP_Log(RTMP_LOGWARNING, "No hostname in URL!");
		return FALSE;
	}

	end = p + strlen(p);
	col = strchr(p, ':');
	ques = strchr(p, '?');
	slash = strchr(p, '/');

	{
		/* host ends at the first of ':' or '/' (or end of string) */
		int hostlen;
		if(slash)
			hostlen = slash - p;
		else
			hostlen = end - p;
		if(col && col -p < hostlen)
			hostlen = col - p;

		if(hostlen < 256) {
			host->av_val = p;
			host->av_len = hostlen;
			RTMP_Log(RTMP_LOGDEBUG, "Parsed host : %.*s", hostlen, host->av_val);
		} else {
			RTMP_Log(RTMP_LOGWARNING, "Hostname exceeds 255 characters!");
		}

		p+=hostlen;
	}

	/* get the port number if available */
	if(*p == ':') {
		unsigned int p2;
		p++;
		p2 = atoi(p);
		if(p2 > 65535) {
			RTMP_Log(RTMP_LOGWARNING, "Invalid port number!");
		} else {
			*port = p2;
		}
	}

	if(!slash) {
		RTMP_Log(RTMP_LOGWARNING, "No application or playpath in URL!");
		return TRUE;	/* host[:port] alone is still a usable URL */
	}
	p = slash+1;

	{
		/* parse application
		 *
		 * rtmp://host[:port]/app[/appinstance][/...]
		 * application = app[/appinstance]
		 */
		char *slash2, *slash3 = NULL, *slash4 = NULL;
		int applen, appnamelen;

		slash2 = strchr(p, '/');
		if(slash2)
			slash3 = strchr(slash2+1, '/');
		if(slash3)
			slash4 = strchr(slash3+1, '/');

		applen = end-p; /* ondemand, pass all parameters as app */
		appnamelen = applen; /* ondemand length */

		if(ques && strstr(p, "slist=")) { /* whatever it is, the '?' and slist= means we need to use everything as app and parse plapath from slist= */
			appnamelen = ques-p;
		}
		else if(strncmp(p, "ondemand/", 9)==0) {
			/* app = ondemand/foobar, only pass app=ondemand */
			applen = 8;
			appnamelen = 8;
		}
		else { /* app!=ondemand, so app is app[/appinstance] */
			if(slash4)
				appnamelen = slash4-p;
			else if(slash3)
				appnamelen = slash3-p;
			else if(slash2)
				appnamelen = slash2-p;

			applen = appnamelen;
		}

		app->av_val = p;
		app->av_len = applen;
		RTMP_Log(RTMP_LOGDEBUG, "Parsed app : %.*s", applen, p);

		p += appnamelen;
	}

	if (*p == '/')
		p++;

	if (end-p) {
		/* whatever remains is the playpath (decoded into a fresh buffer) */
		AVal av = {p, end-p};
		RTMP_ParsePlaypath(&av, playpath);
	}

	return TRUE;
}
/*
 * Extracts playpath from RTMP URL. playpath is the file part of the
 * URL, i.e. the part that comes after rtmp://host:port/app/
 *
 * Returns the stream name in a format understood by FMS. The name is
 * the playpath part of the URL with formatting depending on the stream
 * type:
 *
 * mp4 streams: prepend "mp4:", remove extension
 * mp3 streams: prepend "mp3:", remove extension
 * flv streams: remove extension
 *
 * out->av_val is malloc'd (the caller frees it); it stays NULL on
 * allocation failure.
 */
void RTMP_ParsePlaypath(AVal *in, AVal *out) {
	int addMP4 = 0;
	int addMP3 = 0;
	int subExt = 0;
	const char *playpath = in->av_val;
	const char *temp, *q, *ext = NULL;
	const char *ppstart = playpath;
	char *streamname, *destptr, *p;

	int pplen = in->av_len;

	out->av_val = NULL;
	out->av_len = 0;

	/* a leading '?' means the real name is buried in query params: slist= */
	if ((*ppstart == '?') &&
	    (temp=strstr(ppstart, "slist=")) != 0) {
		ppstart = temp+6;
		pplen = strlen(ppstart);

		temp = strchr(ppstart, '&');
		if (temp) {
			pplen = temp-ppstart;
		}
	}

	/* classify by the 4-char extension immediately before '?' / the end */
	q = strchr(ppstart, '?');
	if (pplen >= 4) {
		if (q)
			ext = q-4;
		else
			ext = &ppstart[pplen-4];
		if ((strncmp(ext, ".f4v", 4) == 0) ||
		    (strncmp(ext, ".mp4", 4) == 0)) {
			addMP4 = 1;
			subExt = 1;
		/* Only remove .flv from rtmp URL, not slist params */
		} else if ((ppstart == playpath) &&
			   (strncmp(ext, ".flv", 4) == 0)) {
			subExt = 1;
		} else if (strncmp(ext, ".mp3", 4) == 0) {
			addMP3 = 1;
			subExt = 1;
		}
	}

	/* worst case: whole path plus a 4-byte "mp4:"/"mp3:" prefix and NUL */
	streamname = (char *)malloc((pplen+4+1)*sizeof(char));
	if (!streamname)
		return;

	destptr = streamname;
	if (addMP4) {
		if (strncmp(ppstart, "mp4:", 4)) {
			strcpy(destptr, "mp4:");
			destptr += 4;
		} else {
			subExt = 0;	/* already prefixed: keep the extension */
		}
	} else if (addMP3) {
		if (strncmp(ppstart, "mp3:", 4)) {
			strcpy(destptr, "mp3:");
			destptr += 4;
		} else {
			subExt = 0;
		}
	}

	for (p=(char *)ppstart; pplen >0;) {
		/* skip extension */
		if (subExt && p == ext) {
			p += 4;
			pplen -= 4;
			continue;
		}
		if (*p == '%' && pplen >= 3 &&
		    isxdigit((unsigned char)p[1]) && isxdigit((unsigned char)p[2])) {
			/* decode a valid %XX escape. The original called sscanf
			 * unchecked, so a truncated or non-hex escape wrote an
			 * uninitialized byte and skipped past the input end;
			 * malformed escapes are now copied through verbatim. */
			unsigned int c;
			sscanf(p+1, "%02x", &c);
			*destptr++ = c;
			pplen -= 3;
			p += 3;
		} else {
			*destptr++ = *p++;
			pplen--;
		}
	}
	*destptr = '\0';

	out->av_val = streamname;
	out->av_len = destptr - streamname;
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,388 @@
#ifndef __RTMP_H__
#define __RTMP_H__
/*
* Copyright (C) 2005-2008 Team XBMC
* http://www.xbmc.org
* Copyright (C) 2008-2009 Andrej Stepanchuk
* Copyright (C) 2009-2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#if !defined(NO_CRYPTO) && !defined(CRYPTO)
#define CRYPTO
#endif
#include <errno.h>
#include <stdint.h>
#include <stddef.h>
#include "amf.h"
#ifdef __cplusplus
extern "C"
{
#endif
#define RTMP_LIB_VERSION 0x020300 /* 2.3 */
#define RTMP_FEATURE_HTTP 0x01
#define RTMP_FEATURE_ENC 0x02
#define RTMP_FEATURE_SSL 0x04
#define RTMP_FEATURE_MFP 0x08 /* not yet supported */
#define RTMP_FEATURE_WRITE 0x10 /* publish, not play */
#define RTMP_FEATURE_HTTP2 0x20 /* server-side rtmpt */
#define RTMP_PROTOCOL_UNDEFINED -1
#define RTMP_PROTOCOL_RTMP 0
#define RTMP_PROTOCOL_RTMPE RTMP_FEATURE_ENC
#define RTMP_PROTOCOL_RTMPT RTMP_FEATURE_HTTP
#define RTMP_PROTOCOL_RTMPS RTMP_FEATURE_SSL
#define RTMP_PROTOCOL_RTMPTE (RTMP_FEATURE_HTTP|RTMP_FEATURE_ENC)
#define RTMP_PROTOCOL_RTMPTS (RTMP_FEATURE_HTTP|RTMP_FEATURE_SSL)
#define RTMP_PROTOCOL_RTMFP RTMP_FEATURE_MFP
#define RTMP_DEFAULT_CHUNKSIZE 128
/* needs to fit largest number of bytes recv() may return */
#define RTMP_BUFFER_CACHE_SIZE (16*1024)
#define RTMP_CHANNELS 65600
extern const char RTMPProtocolStringsLower[][7];
extern const AVal RTMP_DefaultFlashVer;
extern int RTMP_ctrlC;
uint32_t RTMP_GetTime(void);
/* RTMP_PACKET_TYPE_... 0x00 */
#define RTMP_PACKET_TYPE_CHUNK_SIZE 0x01
/* RTMP_PACKET_TYPE_... 0x02 */
#define RTMP_PACKET_TYPE_BYTES_READ_REPORT 0x03
#define RTMP_PACKET_TYPE_CONTROL 0x04
#define RTMP_PACKET_TYPE_SERVER_BW 0x05
#define RTMP_PACKET_TYPE_CLIENT_BW 0x06
/* RTMP_PACKET_TYPE_... 0x07 */
#define RTMP_PACKET_TYPE_AUDIO 0x08
#define RTMP_PACKET_TYPE_VIDEO 0x09
/* RTMP_PACKET_TYPE_... 0x0A */
/* RTMP_PACKET_TYPE_... 0x0B */
/* RTMP_PACKET_TYPE_... 0x0C */
/* RTMP_PACKET_TYPE_... 0x0D */
/* RTMP_PACKET_TYPE_... 0x0E */
#define RTMP_PACKET_TYPE_FLEX_STREAM_SEND 0x0F
#define RTMP_PACKET_TYPE_FLEX_SHARED_OBJECT 0x10
#define RTMP_PACKET_TYPE_FLEX_MESSAGE 0x11
#define RTMP_PACKET_TYPE_INFO 0x12
#define RTMP_PACKET_TYPE_SHARED_OBJECT 0x13
#define RTMP_PACKET_TYPE_INVOKE 0x14
/* RTMP_PACKET_TYPE_... 0x15 */
#define RTMP_PACKET_TYPE_FLASH_VIDEO 0x16
#define RTMP_MAX_HEADER_SIZE 18
#define RTMP_PACKET_SIZE_LARGE 0
#define RTMP_PACKET_SIZE_MEDIUM 1
#define RTMP_PACKET_SIZE_SMALL 2
#define RTMP_PACKET_SIZE_MINIMUM 3
/* One pre-built chunk of an outgoing packet: header bytes + payload slice.
 * Used by RTMP_SendChunk(); c_chunk points at the payload, it is not owned. */
typedef struct RTMPChunk
{
	int c_headerSize;
	int c_chunkSize;
	char *c_chunk;
	char c_header[RTMP_MAX_HEADER_SIZE];
} RTMPChunk;
/* A single RTMP message, assembled from (or split into) wire chunks. */
typedef struct RTMPPacket
{
	uint8_t m_headerType;	/* one of RTMP_PACKET_SIZE_* */
	uint8_t m_packetType;	/* one of RTMP_PACKET_TYPE_* */
	uint8_t m_hasAbsTimestamp;	/* timestamp absolute or relative? */
	int m_nChannel;
	uint32_t m_nTimeStamp;	/* timestamp */
	int32_t m_nInfoField2;	/* last 4 bytes in a long header */
	uint32_t m_nBodySize;
	uint32_t m_nBytesRead;	/* received so far; packet is ready when == m_nBodySize */
	RTMPChunk *m_chunk;
	char *m_body;
} RTMPPacket;
/* Buffered socket: recv()'d bytes land in sb_buf and are consumed via
 * sb_start/sb_size. sb_ssl carries the TLS session when one is active
 * (see the TLS_* macros in rtmp_sys.h). */
typedef struct RTMPSockBuf
{
	int sb_socket;
	int sb_size;		/* number of unprocessed bytes in buffer */
	char *sb_start;		/* pointer into sb_pBuffer of next byte to process */
	char sb_buf[RTMP_BUFFER_CACHE_SIZE];	/* data read from socket */
	int sb_timedout;
	void *sb_ssl;
} RTMPSockBuf;
void RTMPPacket_Reset(RTMPPacket *p);
void RTMPPacket_Dump(RTMPPacket *p);
int RTMPPacket_Alloc(RTMPPacket *p, int nSize);
void RTMPPacket_Free(RTMPPacket *p);
#define RTMPPacket_IsReady(a) ((a)->m_nBytesRead == (a)->m_nBodySize)
/* Connection parameters: everything parsed from the URL or supplied via
 * RTMP_SetOpt/RTMP_SetupStream. AVal members reference caller/parser
 * storage unless the RTMP_LF_FTCU / RTMP_PUB_* flags say otherwise. */
typedef struct RTMP_LNK
{
	AVal hostname;
	AVal sockshost;

	AVal playpath0;	/* parsed from URL */
	AVal playpath;	/* passed in explicitly */
	AVal tcUrl;
	AVal swfUrl;
	AVal pageUrl;
	AVal app;
	AVal auth;
	AVal flashVer;
	AVal subscribepath;
	AVal usherToken;
	AVal token;
	AVal pubUser;
	AVal pubPasswd;
	AMFObject extras;
	int edepth;

	int seekTime;
	int stopTime;

#define RTMP_LF_AUTH	0x0001	/* using auth param */
#define RTMP_LF_LIVE	0x0002	/* stream is live */
#define RTMP_LF_SWFV	0x0004	/* do SWF verification */
#define RTMP_LF_PLST	0x0008	/* send playlist before play */
#define RTMP_LF_BUFX	0x0010	/* toggle stream on BufferEmpty msg */
#define RTMP_LF_FTCU	0x0020	/* free tcUrl on close */
	int lFlags;

	int swfAge;

	int protocol;	/* RTMP_PROTOCOL_* feature bits */
	int timeout;	/* connection timeout in seconds */

#define RTMP_PUB_NAME	0x0001	/* send login to server */
#define RTMP_PUB_RESP	0x0002	/* send salted password hash */
#define RTMP_PUB_ALLOC	0x0004	/* allocated data for new tcUrl & app */
#define RTMP_PUB_CLEAN	0x0008	/* need to free allocated data for newer tcUrl & app at exit */
#define RTMP_PUB_CLATE	0x0010	/* late clean tcUrl & app at exit */
	int pFlags;

	unsigned short socksport;
	unsigned short port;

#ifdef CRYPTO
#define RTMP_SWF_HASHLEN	32
	void *dh;	/* for encryption */
	void *rc4keyIn;
	void *rc4keyOut;

	uint32_t SWFSize;
	uint8_t SWFHash[RTMP_SWF_HASHLEN];
	char SWFVerificationResponse[RTMP_SWF_HASHLEN+10];
#endif
} RTMP_LNK;
/* state for read() wrapper (RTMP_Read): tracks FLV-header emission,
 * resume position, and keyframe/seek bookkeeping between calls */
typedef struct RTMP_READ
{
	char *buf;
	char *bufpos;
	unsigned int buflen;
	uint32_t timestamp;
	uint8_t dataType;
	uint8_t flags;	/* RTMP_READ_* progress bits below */
#define RTMP_READ_HEADER	0x01
#define RTMP_READ_RESUME	0x02
#define RTMP_READ_NO_IGNORE	0x04
#define RTMP_READ_GOTKF		0x08
#define RTMP_READ_GOTFLVK	0x10
#define RTMP_READ_SEEKING	0x20
	int8_t status;	/* RTMP_READ_* status codes below */
#define RTMP_READ_COMPLETE	-3
#define RTMP_READ_ERROR	-2
#define RTMP_READ_EOF	-1
#define RTMP_READ_IGNORE	0

	/* if bResume == TRUE */
	uint8_t initialFrameType;
	uint32_t nResumeTS;
	char *metaHeader;
	char *initialFrame;
	uint32_t nMetaHeaderSize;
	uint32_t nInitialFrameSize;
	uint32_t nIgnoredFrameCounter;
	uint32_t nIgnoredFlvFrameCounter;
} RTMP_READ;
/* One entry of the remote-method-call queue (RTMP.m_methodCalls):
 * the invoked method's name plus its numeric id. */
typedef struct RTMP_METHOD
{
	AVal name;
	int num;	/* presumably the invoke/transaction id — confirm in rtmp.c */
} RTMP_METHOD;
/* NOTE(review): local "lake" patch — a non-extern variable defined in a
 * header. Every translation unit including rtmp.h gets its own tentative
 * definition; this should likely be `extern` here with a single definition
 * in one .c file. Confirm how callers use it before changing linkage. */
int _sockerr;
/* Main session object: chunking state, bandwidth counters, per-channel
 * packet reassembly, the read()/write() wrappers, the socket buffer and
 * the parsed connection parameters (Link). */
typedef struct RTMP
{
	int m_inChunkSize;	/* chunk size for incoming data */
	int m_outChunkSize;	/* chunk size for outgoing data */
	int m_nBWCheckCounter;
	int m_nBytesIn;
	int m_nBytesInSent;
	int m_nBufferMS;
	int m_stream_id;	/* returned in _result from createStream */
	int m_mediaChannel;
	uint32_t m_mediaStamp;
	uint32_t m_pauseStamp;
	int m_pausing;
	int m_nServerBW;
	int m_nClientBW;
	uint8_t m_nClientBW2;
	uint8_t m_bPlaying;
	uint8_t m_bSendEncoding;
	uint8_t m_bSendCounter;

	int m_numInvokes;
	int m_numCalls;
	RTMP_METHOD *m_methodCalls;	/* remote method calls queue */

	int m_channelsAllocatedIn;
	int m_channelsAllocatedOut;
	RTMPPacket **m_vecChannelsIn;	/* per-channel partial packets being read */
	RTMPPacket **m_vecChannelsOut;	/* per-channel last packet sent */
	int *m_channelTimestamp;	/* abs timestamp of last packet */

	double m_fAudioCodecs;	/* audioCodecs for the connect packet */
	double m_fVideoCodecs;	/* videoCodecs for the connect packet */
	double m_fEncoding;	/* AMF0 or AMF3 */

	double m_fDuration;	/* duration of stream in seconds */

	int m_msgCounter;	/* RTMPT stuff */
	int m_polling;
	int m_resplen;
	int m_unackd;
	AVal m_clientID;

	RTMP_READ m_read;
	RTMPPacket m_write;
	RTMPSockBuf m_sb;
	RTMP_LNK Link;

	/* local "lake" addition; 16 bytes suggests an IPv4 dotted-quad
	 * string ("255.255.255.255" + NUL) — confirm where it is filled */
	char ipaddr[16];
} RTMP;
int RTMP_ParseURL(const char *url, int *protocol, AVal *host,
unsigned int *port, AVal *playpath, AVal *app);
void RTMP_ParsePlaypath(AVal *in, AVal *out);
void RTMP_SetBufferMS(RTMP *r, int size);
void RTMP_UpdateBufferMS(RTMP *r);
int RTMP_SetOpt(RTMP *r, const AVal *opt, AVal *arg);
int RTMP_SetupURL(RTMP *r, char *url);
void RTMP_SetupStream(RTMP *r, int protocol,
AVal *hostname,
unsigned int port,
AVal *sockshost,
AVal *playpath,
AVal *tcUrl,
AVal *swfUrl,
AVal *pageUrl,
AVal *app,
AVal *auth,
AVal *swfSHA256Hash,
uint32_t swfSize,
AVal *flashVer,
AVal *subscribepath,
AVal *usherToken,
int dStart,
int dStop, int bLiveStream, long int timeout);
int RTMP_Connect(RTMP *r, RTMPPacket *cp);
struct sockaddr;
int RTMP_Connect0(RTMP *r, struct sockaddr *svc);
int RTMP_Connect1(RTMP *r, RTMPPacket *cp);
int RTMP_Serve(RTMP *r);
int RTMP_TLS_Accept(RTMP *r, void *ctx);
int RTMP_ReadPacket(RTMP *r, RTMPPacket *packet);
int RTMP_SendPacket(RTMP *r, RTMPPacket *packet, int queue);
int RTMP_SendChunk(RTMP *r, RTMPChunk *chunk);
int RTMP_IsConnected(RTMP *r);
int RTMP_Socket(RTMP *r);
int RTMP_IsTimedout(RTMP *r);
double RTMP_GetDuration(RTMP *r);
int RTMP_ToggleStream(RTMP *r);
int RTMP_ConnectStream(RTMP *r, int seekTime);
int RTMP_ReconnectStream(RTMP *r, int seekTime);
void RTMP_DeleteStream(RTMP *r);
int RTMP_GetNextMediaPacket(RTMP *r, RTMPPacket *packet);
int RTMP_ClientPacket(RTMP *r, RTMPPacket *packet);
void RTMP_Init(RTMP *r);
void RTMP_Close(RTMP *r);
RTMP *RTMP_Alloc(void);
void RTMP_Free(RTMP *r);
void RTMP_EnableWrite(RTMP *r);
void *RTMP_TLS_AllocServerContext(const char* cert, const char* key);
void RTMP_TLS_FreeServerContext(void *ctx);
int RTMP_LibVersion(void);
void RTMP_UserInterrupt(void); /* user typed Ctrl-C */
int RTMP_SendCtrl(RTMP *r, short nType, unsigned int nObject,
unsigned int nTime);
/* caller probably doesn't know current timestamp, should
* just use RTMP_Pause instead
*/
int RTMP_SendPause(RTMP *r, int DoPause, int dTime);
int RTMP_Pause(RTMP *r, int DoPause);
int RTMP_FindFirstMatchingProperty(AMFObject *obj, const AVal *name,
AMFObjectProperty * p);
int RTMPSockBuf_Fill(RTMPSockBuf *sb);
int RTMPSockBuf_Send(RTMPSockBuf *sb, const char *buf, int len);
int RTMPSockBuf_Close(RTMPSockBuf *sb);
int RTMP_SendCreateStream(RTMP *r);
int RTMP_SendSeek(RTMP *r, int dTime);
int RTMP_SendServerBW(RTMP *r);
int RTMP_SendClientBW(RTMP *r);
void RTMP_DropRequest(RTMP *r, int i, int freeit);
int RTMP_Read(RTMP *r, char *buf, int size);
int RTMP_Write(RTMP *r, const char *buf, int size);
/* hashswf.c */
int RTMP_HashSWF(const char *url, unsigned int *size, unsigned char *hash,
int age);
#ifdef __cplusplus
};
#endif
#endif

View File

@@ -0,0 +1,139 @@
#ifndef __RTMP_SYS_H__
#define __RTMP_SYS_H__
/*
* Copyright (C) 2010 Howard Chu
*
* This file is part of librtmp.
*
* librtmp is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1,
* or (at your option) any later version.
*
* librtmp is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with librtmp see the file COPYING. If not, write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/lgpl.html
*/
#ifdef _WIN32
#include <winsock2.h>
#include <ws2tcpip.h>
#ifdef _MSC_VER /* MSVC */
#define snprintf _snprintf
#define strcasecmp stricmp
#define strncasecmp strnicmp
#define vsnprintf _vsnprintf
#endif
#define GetSockError() WSAGetLastError()
#define SetSockError(e) WSASetLastError(e)
#define setsockopt(a,b,c,d,e) (setsockopt)(a,b,c,(const char *)d,(int)e)
#define EWOULDBLOCK WSAETIMEDOUT /* we don't use nonblocking, but we do use timeouts */
#define sleep(n) Sleep(n*1000)
#define msleep(n) Sleep(n)
#define SET_RCVTIMEO(tv,s) int tv = s*1000
#else /* !_WIN32 */
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/times.h>
#include <netdb.h>
#include <unistd.h>
#include <netinet/in.h>
#include <netinet/tcp.h>
#include <arpa/inet.h>
#define GetSockError() errno
#define SetSockError(e) errno = e
#undef closesocket
#define closesocket(s) close(s)
#define msleep(n) usleep(n*1000)
#define SET_RCVTIMEO(tv,s) struct timeval tv = {s,0}
#endif
#include "rtmp.h"
#ifdef USE_POLARSSL
#include <polarssl/version.h>
#include <polarssl/net.h>
#include <polarssl/ssl.h>
#include <polarssl/havege.h>
#if POLARSSL_VERSION_NUMBER < 0x01010000
#define havege_random havege_rand
#endif
#if POLARSSL_VERSION_NUMBER >= 0x01020000
#define SSL_SET_SESSION(S,resume,timeout,ctx) ssl_set_session(S,ctx)
#else
#define SSL_SET_SESSION(S,resume,timeout,ctx) ssl_set_session(S,resume,timeout,ctx)
#endif
typedef struct tls_ctx {
havege_state hs;
ssl_session ssn;
} tls_ctx;
typedef struct tls_server_ctx {
havege_state *hs;
x509_cert cert;
rsa_context key;
ssl_session ssn;
const char *dhm_P, *dhm_G;
} tls_server_ctx;
#define TLS_CTX tls_ctx *
#define TLS_client(ctx,s) s = malloc(sizeof(ssl_context)); ssl_init(s);\
ssl_set_endpoint(s, SSL_IS_CLIENT); ssl_set_authmode(s, SSL_VERIFY_NONE);\
ssl_set_rng(s, havege_random, &ctx->hs);\
ssl_set_ciphersuites(s, ssl_default_ciphersuites);\
SSL_SET_SESSION(s, 1, 600, &ctx->ssn)
#define TLS_server(ctx,s) s = malloc(sizeof(ssl_context)); ssl_init(s);\
ssl_set_endpoint(s, SSL_IS_SERVER); ssl_set_authmode(s, SSL_VERIFY_NONE);\
ssl_set_rng(s, havege_random, ((tls_server_ctx*)ctx)->hs);\
ssl_set_ciphersuites(s, ssl_default_ciphersuites);\
SSL_SET_SESSION(s, 1, 600, &((tls_server_ctx*)ctx)->ssn);\
ssl_set_own_cert(s, &((tls_server_ctx*)ctx)->cert, &((tls_server_ctx*)ctx)->key);\
ssl_set_dh_param(s, ((tls_server_ctx*)ctx)->dhm_P, ((tls_server_ctx*)ctx)->dhm_G)
#define TLS_setfd(s,fd) ssl_set_bio(s, net_recv, &fd, net_send, &fd)
#define TLS_connect(s) ssl_handshake(s)
#define TLS_accept(s) ssl_handshake(s)
#define TLS_read(s,b,l) ssl_read(s,(unsigned char *)b,l)
#define TLS_write(s,b,l) ssl_write(s,(unsigned char *)b,l)
#define TLS_shutdown(s) ssl_close_notify(s)
#define TLS_close(s) ssl_free(s); free(s)
#elif defined(USE_GNUTLS)
#include <gnutls/gnutls.h>
typedef struct tls_ctx {
gnutls_certificate_credentials_t cred;
gnutls_priority_t prios;
} tls_ctx;
#define TLS_CTX tls_ctx *
#define TLS_client(ctx,s) gnutls_init((gnutls_session_t *)(&s), GNUTLS_CLIENT); gnutls_priority_set(s, ctx->prios); gnutls_credentials_set(s, GNUTLS_CRD_CERTIFICATE, ctx->cred)
#define TLS_server(ctx,s) gnutls_init((gnutls_session_t *)(&s), GNUTLS_SERVER); gnutls_priority_set_direct(s, "NORMAL", NULL); gnutls_credentials_set(s, GNUTLS_CRD_CERTIFICATE, ctx)
#define TLS_setfd(s,fd) gnutls_transport_set_ptr(s, (gnutls_transport_ptr_t)(long)fd)
#define TLS_connect(s) gnutls_handshake(s)
#define TLS_accept(s) gnutls_handshake(s)
#define TLS_read(s,b,l) gnutls_record_recv(s,b,l)
#define TLS_write(s,b,l) gnutls_record_send(s,b,l)
#define TLS_shutdown(s) gnutls_bye(s, GNUTLS_SHUT_RDWR)
#define TLS_close(s) gnutls_deinit(s)
#else /* USE_OPENSSL */
#define TLS_CTX SSL_CTX *
#define TLS_client(ctx,s) s = SSL_new(ctx)
#define TLS_server(ctx,s) s = SSL_new(ctx)
#define TLS_setfd(s,fd) SSL_set_fd(s,fd)
#define TLS_connect(s) SSL_connect(s)
#define TLS_accept(s) SSL_accept(s)
#define TLS_read(s,b,l) SSL_read(s,b,l)
#define TLS_write(s,b,l) SSL_write(s,b,l)
#define TLS_shutdown(s) SSL_shutdown(s)
#define TLS_close(s) SSL_free(s)
#endif
#endif

View File

@@ -0,0 +1,28 @@
#ifndef __LEUDLOG_H__
#define __LEUDLOG_H__
/*
 * Minimal debug-log shim: LOGD() goes to Android logcat when building
 * for Android and to stdout otherwise, and expands to nothing unless
 * ENABLE_LOG is defined.
 */

#define ENABLE_LOG 1

#ifdef __ANDROID__
#include <android/log.h>
#define LOG_TAG "RESRTMP"
#ifdef ENABLE_LOG
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
#else
#define LOGD(...)
#endif
#else
#include <stdio.h>
#ifdef ENABLE_LOG
#define LOGD(...) printf(__VA_ARGS__)
#else
#define LOGD(...)
#endif
#endif

#endif

Binary file not shown.

Some files were not shown because too many files have changed in this diff Show More