For the CMakeLists configuration, the code is given here directly:
cmake_minimum_required(VERSION 3.4.1)
# pull in the CMakeLists.txt under the specified directory
add_subdirectory(src/main/cpp/librtmp)
add_library(
        native-lib
        SHARED
        src/main/cpp/native-lib.cpp
        src/main/cpp/VideoChannel.cpp)
include_directories(src/main/cpp/include)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -L${CMAKE_SOURCE_DIR}/src/main/cpp/libs/${ANDROID_ABI}")
target_link_libraries(
        native-lib
        rtmp
        x264
        log)
In video push streaming, the Java layer mainly handles initialization, setting the preview surface, switching the camera, starting the live stream, and stopping it:
MainActivity:
package com.example.rtmp;

import androidx.appcompat.app.AppCompatActivity;

import android.hardware.Camera;
import android.os.Bundle;
import android.view.SurfaceView;
import android.view.View;

import com.example.rtmp.live.LivePusher;

public class MainActivity extends AppCompatActivity {

    private LivePusher livePusher;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        SurfaceView surfaceView = findViewById(R.id.surfaceView);
        livePusher = new LivePusher(this, 800, 480, 800_000, 10, Camera.CameraInfo.CAMERA_FACING_BACK);
        // set the surface used for the camera preview
        livePusher.setPreviewDisplay(surfaceView.getHolder());
    }

    public void switchCamera(View view) {
        livePusher.switchCamera();
    }

    public void startLive(View view) {
        livePusher.startLive("rtmp://47.75.90.219/myapp/mystream");
    }

    public void stopLive(View view) {
        livePusher.stopLive();
    }
}
LivePusher code:
package com.example.rtmp.live;

import android.app.Activity;
import android.view.SurfaceHolder;

import com.example.rtmp.live.channel.AudioChannel;
import com.example.rtmp.live.channel.VideoChannel;

public class LivePusher {

    static {
        System.loadLibrary("native-lib");
    }

    private AudioChannel audioChannel;
    private VideoChannel videoChannel;

    public LivePusher(Activity activity, int width, int height, int bitrate,
                      int fps, int cameraId) {
        native_init();
        videoChannel = new VideoChannel(this, activity, width, height, bitrate, fps, cameraId);
        audioChannel = new AudioChannel();
    }

    public void setPreviewDisplay(SurfaceHolder surfaceHolder) {
        videoChannel.setPreviewDisplay(surfaceHolder);
    }

    public void switchCamera() {
        videoChannel.switchCamera();
    }

    public void startLive(String path) { // start the live stream once the encoder has been opened
        native_start(path); // starts a thread that connects to the server
        videoChannel.startLive();
        audioChannel.startLive();
    }

    public void stopLive() {
        videoChannel.stopLive();
        audioChannel.stopLive();
        native_stop();
    }

    public native void native_init();
    public native void native_start(String path);
    public native void native_setVideoEncInfo(int width, int height, int fps, int bitrate);
    public native void native_pushVideo(byte[] data);
    public native void native_stop();
    public native void native_release();
}
CameraHelper code:
package com.example.rtmp.live.channel;

import android.app.Activity;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;

import java.util.Iterator;
import java.util.List;

public class CameraHelper implements SurfaceHolder.Callback, Camera.PreviewCallback {

    private static final String TAG = "CameraHelper";

    private Activity mActivity;
    private int mHeight;
    private int mWidth;
    private int mCameraId;
    private Camera mCamera;
    private byte[] buffer;
    private SurfaceHolder mSurfaceHolder;
    private Camera.PreviewCallback mPreviewCallback;
    private int mRotation;
    private OnChangedSizeListener mOnChangedSizeListener;

    public CameraHelper(Activity activity, int cameraId, int width, int height) {
        mActivity = activity;
        mCameraId = cameraId;
        mWidth = width;
        mHeight = height;
    }

    public void switchCamera() {
        if (mCameraId == Camera.CameraInfo.CAMERA_FACING_BACK) {
            mCameraId = Camera.CameraInfo.CAMERA_FACING_FRONT;
        } else {
            mCameraId = Camera.CameraInfo.CAMERA_FACING_BACK;
        }
        stopPreview();
        startPreview();
    }

    private void stopPreview() {
        if (mCamera != null) {
            // clear the preview data callback
            mCamera.setPreviewCallback(null);
            // stop the preview
            mCamera.stopPreview();
            // release the camera
            mCamera.release();
            mCamera = null;
        }
    }

    private void startPreview() {
        try {
            // obtain the Camera object
            mCamera = Camera.open(mCameraId);
            // configure the camera parameters
            Camera.Parameters parameters = mCamera.getParameters();
            // set the preview data format to NV21
            parameters.setPreviewFormat(ImageFormat.NV21);
            // set the camera width and height
            setPreviewSize(parameters);
            // set the orientation of the camera's image sensor
            setPreviewOrientation(parameters);
            mCamera.setParameters(parameters);
            buffer = new byte[mWidth * mHeight * 3 / 2];
            // preview data buffer
            mCamera.addCallbackBuffer(buffer);
            mCamera.setPreviewCallbackWithBuffer(this);
            // set the preview surface
            mCamera.setPreviewDisplay(mSurfaceHolder);
            mOnChangedSizeListener.onChanged(mWidth, mHeight);
            mCamera.startPreview();
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    private void setPreviewOrientation(Camera.Parameters parameters) {
        Camera.CameraInfo info = new Camera.CameraInfo();
        Camera.getCameraInfo(mCameraId, info);
        mRotation = mActivity.getWindowManager().getDefaultDisplay().getRotation();
        int degrees = 0;
        switch (mRotation) {
            case Surface.ROTATION_0:
                degrees = 0;
                break;
            case Surface.ROTATION_90: // landscape, top of the device on the left (home button on the right)
                degrees = 90;
                break;
            case Surface.ROTATION_180:
                degrees = 180;
                break;
            case Surface.ROTATION_270: // landscape, top of the device on the right
                degrees = 270;
                break;
        }
        int result;
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            result = (info.orientation + degrees) % 360;
            result = (360 - result) % 360; // compensate the mirror
        } else { // back-facing
            result = (info.orientation - degrees + 360) % 360;
        }
        // apply the rotation
        mCamera.setDisplayOrientation(result);
    }

    private void setPreviewSize(Camera.Parameters parameters) {
        // get the preview sizes the camera supports
        List<Camera.Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
        Camera.Size size = supportedPreviewSizes.get(0);
        Log.d(TAG, "supported: " + size.width + "x" + size.height);
        // pick the supported resolution closest to the requested one,
        // e.g. among 10x10, 20x20 and 30x30, a request of 12x12 picks 10x10
        int m = Math.abs(size.height * size.width - mWidth * mHeight);
        supportedPreviewSizes.remove(0);
        Iterator<Camera.Size> iterator = supportedPreviewSizes.iterator();
        // iterate over the remaining sizes
        while (iterator.hasNext()) {
            Camera.Size next = iterator.next();
            Log.d(TAG, "supported: " + next.width + "x" + next.height);
            int n = Math.abs(next.height * next.width - mWidth * mHeight);
            if (n < m) {
                m = n;
                size = next;
            }
        }
        mWidth = size.width;
        mHeight = size.height;
        parameters.setPreviewSize(mWidth, mHeight);
        Log.d(TAG, "preview size set to width:" + size.width + " height:" + size.height);
    }

    public void setPreviewDisplay(SurfaceHolder surfaceHolder) {
        mSurfaceHolder = surfaceHolder;
        mSurfaceHolder.addCallback(this);
    }

    public void setPreviewCallback(Camera.PreviewCallback previewCallback) {
        mPreviewCallback = previewCallback;
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        // release the camera
        stopPreview();
        // reopen the camera
        startPreview();
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        stopPreview();
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        // the data here is still not upright (rotation is handled later)
        mPreviewCallback.onPreviewFrame(data, camera);
        camera.addCallbackBuffer(buffer);
    }

    public void setOnChangedSizeListener(OnChangedSizeListener listener) {
        mOnChangedSizeListener = listener;
    }

    public interface OnChangedSizeListener {
        void onChanged(int w, int h);
    }
}
VideoChannel:
package com.example.rtmp.live.channel;

import android.app.Activity;
import android.hardware.Camera;
import android.view.SurfaceHolder;

import com.example.rtmp.live.LivePusher;

public class VideoChannel implements Camera.PreviewCallback, CameraHelper.OnChangedSizeListener {

    private LivePusher mLivePusher;
    private CameraHelper cameraHelper;
    private int mBitrate;
    private int mFps;
    private boolean isLiving;

    public VideoChannel(LivePusher livePusher, Activity activity, int width, int height, int bitrate, int fps, int cameraId) {
        mLivePusher = livePusher;
        mBitrate = bitrate;
        mFps = fps;
        cameraHelper = new CameraHelper(activity, cameraId, width, height);
        // 1. have CameraHelper deliver its preview frames to this class
        cameraHelper.setPreviewCallback(this);
        // 2. get called back with the real camera width and height
        cameraHelper.setOnChangedSizeListener(this);
    }

    public void setPreviewDisplay(SurfaceHolder surfaceHolder) {
        cameraHelper.setPreviewDisplay(surfaceHolder);
    }

    /**
     * Receives the NV21 preview data from the camera.
     *
     * @param data
     * @param camera
     */
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        if (isLiving) {
            mLivePusher.native_pushVideo(data); // hand the data to native for encoding and pushing
        }
    }

    public void switchCamera() {
        cameraHelper.switchCamera();
    }

    /**
     * The real camera width and height.
     *
     * @param w
     * @param h
     */
    @Override
    public void onChanged(int w, int h) {
        // initialize the encoder
        mLivePusher.native_setVideoEncInfo(w, h, mFps, mBitrate);
    }

    public void startLive() { // after this, frames flow through onPreviewFrame
        isLiving = true;
    }

    public void stopLive() {
        isLiving = false;
    }
}
You can step through with the debugger to trace this flow of calls.
Next comes the development of the JNI layer.
First, the initialization:
SafeQueue<RTMPPacket *> packets; // queue of packed RTMP packets waiting to be sent
VideoChannel *videoChannel = 0;
int isStart = 0;
pthread_t pid;
int readyPushing = 0;   // set once the RTMP connection is up
uint32_t start_time;    // timestamp reference for the stream

void releasePackets(RTMPPacket *&packet) {
    DELETE(packet);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_example_rtmp_live_LivePusher_native_1init(JNIEnv *env, jobject instance) {
    // prepare the video encoder helper class that does the encoding
    videoChannel = new VideoChannel;
    // prepare a queue: packed data is pushed into it, and a worker thread
    // pops the packets and sends them to the server
    packets.setReleaseCallback(releasePackets);
}
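The SafeQueue used above is not listed in this article. Purely as a rough sketch — the project's actual implementation may differ — a thread-safe queue that exposes the interface the JNI code relies on (setWork, setReleaseCallback, push, pop, clear) could look like this:

#include <pthread.h>
#include <queue>

template<typename T>
class SafeQueue {
public:
    typedef void (*ReleaseCallback)(T &);

    SafeQueue() {
        pthread_mutex_init(&mutex, 0);
        pthread_cond_init(&cond, 0);
    }

    ~SafeQueue() {
        pthread_cond_destroy(&cond);
        pthread_mutex_destroy(&mutex);
    }

    // work == 1: the queue accepts and hands out items; work == 0: pop() stops blocking
    void setWork(int work) {
        pthread_mutex_lock(&mutex);
        this->work = work;
        pthread_cond_signal(&cond);
        pthread_mutex_unlock(&mutex);
    }

    void setReleaseCallback(ReleaseCallback callback) {
        releaseCallback = callback;
    }

    void push(T value) {
        pthread_mutex_lock(&mutex);
        if (work) {
            q.push(value);
            pthread_cond_signal(&cond);
        } else if (releaseCallback) {
            // not working: release the item instead of queueing it
            releaseCallback(value);
        }
        pthread_mutex_unlock(&mutex);
    }

    // blocks until an item is available or the queue is switched off
    int pop(T &value) {
        int ret = 0;
        pthread_mutex_lock(&mutex);
        while (work && q.empty()) {
            pthread_cond_wait(&cond, &mutex);
        }
        if (!q.empty()) {
            value = q.front();
            q.pop();
            ret = 1;
        }
        pthread_mutex_unlock(&mutex);
        return ret;
    }

    void clear() {
        pthread_mutex_lock(&mutex);
        while (!q.empty()) {
            T value = q.front();
            if (releaseCallback) {
                releaseCallback(value);
            }
            q.pop();
        }
        pthread_mutex_unlock(&mutex);
    }

private:
    std::queue<T> q;
    pthread_mutex_t mutex;
    pthread_cond_t cond;
    int work = 0;
    ReleaseCallback releaseCallback = 0;
};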
Initialize the encoder:
extern "C"
JNIEXPORT void JNICALL
Java_com_example_rtmp_live_LivePusher_native_1setVideoEncInfo(JNIEnv *env, jobject instance, jint width,
                                                              jint height, jint fps, jint bitrate) {
    if (videoChannel) {
        videoChannel->setVideoEncInfo(width, height, fps, bitrate);
    }
}
The setVideoEncInfo code is as follows:
void VideoChannel::setVideoEncInfo(jint width, jint height, jint fps, jint bitrate) {
    pthread_mutex_lock(&mutex);
    mWidth = width;
    mHeight = height;
    mFps = fps;
    mBitrate = bitrate;
    if (videoCodec) { // when switching cameras, the previous encoder must be released first
        x264_encoder_close(videoCodec);
        videoCodec = 0;
    }
    // open the x264 encoder
    // x264 encoder parameters
    x264_param_t param;
    // "ultrafast": fastest preset; "zerolatency": no-latency encoding
    x264_param_default_preset(&param, "ultrafast", "zerolatency");
    // baseline level 3.2
    param.i_level_idc = 32;
    // input data format
    param.i_csp = X264_CSP_I420;
    param.i_width = width;
    param.i_height = height;
    // no B-frames
    param.i_bframe = 0;
    // i_rc_method selects rate control: CQP (constant QP), CRF (constant rate factor), ABR (average bitrate)
    param.rc.i_rc_method = X264_RC_ABR;
    // bitrate (in Kbps)
    param.rc.i_bitrate = bitrate / 1000;
    // maximum instantaneous bitrate
    param.rc.i_vbv_max_bitrate = bitrate / 1000 * 1.2;
    // must be set when i_vbv_max_bitrate is set: rate-control buffer size, in Kbps
    param.rc.i_vbv_buffer_size = bitrate / 1000;
    // frame rate
    param.i_fps_num = fps;
    param.i_fps_den = 1;
    param.i_timebase_den = param.i_fps_num;
    param.i_timebase_num = param.i_fps_den;
    // param.pf_log = x264_log_default2;
    // use the fps rather than timestamps to compute frame distance
    param.b_vfr_input = 0;
    // key frame interval: one key frame every 2 seconds
    param.i_keyint_max = fps * 2;
    // copy the SPS/PPS in front of every key frame, so each I-frame carries its own SPS/PPS
    param.b_repeat_headers = 1;
    // threads
    param.i_threads = 1;
    x264_param_apply_profile(&param, "baseline");
    // open the encoder
    videoCodec = x264_encoder_open(&param);

    // allocate the reusable input picture used later by encodeData (must match the new size)
    ySize = width * height;
    uvSize = ySize / 4;
    if (pic_in) {
        x264_picture_clean(pic_in);
        DELETE(pic_in);
    }
    pic_in = new x264_picture_t;
    x264_picture_alloc(pic_in, X264_CSP_I420, width, height);

    pthread_mutex_unlock(&mutex);
}
Note the mutex: the encoder is touched from multiple threads, and a mutex is the usual way to keep that thread-safe.
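The native VideoChannel class declaration itself is not shown in this part. For orientation only, a minimal header sketch is given below; the member names (mWidth, mHeight, ySize, uvSize, videoCodec, pic_in, the mutex) are inferred from the code above, while the VideoCallback typedef and setVideoCallback method are assumptions about how packets get handed back to native-lib.cpp, and the include paths depend on the project layout:

#ifndef RTMP_VIDEOCHANNEL_H
#define RTMP_VIDEOCHANNEL_H

#include <cstdint>
#include <jni.h>
#include <pthread.h>
#include <x264.h>
#include <rtmp.h>   // adjust include paths to the project layout

class VideoChannel {
public:
    // callback used by sendSpsPps/sendFrame to hand packets back to native-lib.cpp
    typedef void (*VideoCallback)(RTMPPacket *packet);

    VideoChannel();   // defined in VideoChannel.cpp (e.g. mutex init)
    ~VideoChannel();  // defined in VideoChannel.cpp (e.g. mutex destroy, encoder cleanup)

    void setVideoEncInfo(jint width, jint height, jint fps, jint bitrate);
    void encodeData(int8_t *data);
    void setVideoCallback(VideoCallback videoCallback);

private:
    void sendSpsPps(uint8_t *sps, uint8_t *pps, int sps_len, int pps_len);
    void sendFrame(int type, uint8_t *payload, int i_payload);

    pthread_mutex_t mutex;      // protects the encoder while it is reconfigured
    int mWidth = 0;
    int mHeight = 0;
    int mFps = 0;
    int mBitrate = 0;
    int ySize = 0;              // width * height
    int uvSize = 0;             // ySize / 4
    x264_t *videoCodec = 0;     // x264 encoder handle
    x264_picture_t *pic_in = 0; // reusable input picture
    VideoCallback videoCallback = 0;
};

#endif // RTMP_VIDEOCHANNEL_H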
Start the live stream:
void *start(void *args) {
    char *url = static_cast<char *>(args);
    RTMP *rtmp = 0;
    do {
        rtmp = RTMP_Alloc();
        if (!rtmp) {
            LOGE("failed to alloc RTMP");
            break;
        }
        RTMP_Init(rtmp);
        int ret = RTMP_SetupURL(rtmp, url);
        if (!ret) {
            LOGE("failed to set url: %s", url);
            break;
        }
        // 5 second timeout
        rtmp->Link.timeout = 5;
        RTMP_EnableWrite(rtmp);
        ret = RTMP_Connect(rtmp, 0);
        if (!ret) {
            LOGE("failed to connect to server: %s", url);
            break;
        }
        ret = RTMP_ConnectStream(rtmp, 0);
        if (!ret) {
            LOGE("failed to connect stream: %s", url);
            break;
        }
        // record the start time
        start_time = RTMP_GetTime();
        // from here on, pushing can begin
        readyPushing = 1;
        packets.setWork(1);
        RTMPPacket *packet = 0;
        while (readyPushing) {
            packets.pop(packet);
            if (!isStart) {
                break;
            }
            if (!packet) {
                continue;
            }
            packet->m_nInfoField2 = rtmp->m_stream_id;
            // send the rtmp packet (the 1 means queue it)
            // note: if the network drops and the send fails, rtmpdump internally calls RTMP_Close,
            // RTMP_Close in turn calls RTMP_SendPacket, and RTMP_SendPacket calls RTMP_Close again;
            // to break this recursion, comment out the RTMP_Close call inside WriteN in rtmp.c
            ret = RTMP_SendPacket(rtmp, packet, 1);
            releasePackets(packet);
            if (!ret) {
                LOGE("send failed");
                break;
            }
        }
        releasePackets(packet);
    } while (0);

    isStart = 0;
    readyPushing = 0;
    packets.setWork(0);
    packets.clear();
    if (rtmp) {
        RTMP_Close(rtmp);
        RTMP_Free(rtmp);
    }
    delete[] url;
    return 0;
}
extern "C"
JNIEXPORT void JNICALL
Java_com_example_rtmp_live_LivePusher_native_1start(JNIEnv *env, jobject thiz, jstring path_) {
    if (isStart) {
        return;
    }
    const char *path = env->GetStringUTFChars(path_, 0);
    char *url = new char[strlen(path) + 1];
    strcpy(url, path);
    isStart = 1;
    // start the worker thread that connects to the server and sends packets
    pthread_create(&pid, 0, start, url);
    env->ReleaseStringUTFChars(path_, path);
}
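The Java layer also declares native_stop and native_release, which this part does not list. The following is only a sketch of what they might look like, assuming the globals above and that stopping simply lets the worker thread fall out of its loop (setWork(0) wakes a blocked pop, and pthread_join waits for the thread to exit); the real project may handle this differently:

extern "C"
JNIEXPORT void JNICALL
Java_com_example_rtmp_live_LivePusher_native_1stop(JNIEnv *env, jobject thiz) {
    // stop the push loop; setWork(0) wakes a blocked pop() so the thread can exit
    isStart = 0;
    readyPushing = 0;
    packets.setWork(0);
    pthread_join(pid, 0);
}

extern "C"
JNIEXPORT void JNICALL
Java_com_example_rtmp_live_LivePusher_native_1release(JNIEnv *env, jobject thiz) {
    // free the encoder wrapper with the same DELETE helper used in releasePackets
    DELETE(videoChannel);
}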
Encode and send the SPS, PPS, and I-frames:
extern "C"
JNIEXPORT void JNICALL
Java_com_example_rtmp_live_LivePusher_native_1pushVideo(JNIEnv *env, jobject thiz, jbyteArray data_) {
    if (!videoChannel || !readyPushing) {
        return;
    }
    jbyte *data = env->GetByteArrayElements(data_, NULL);
    videoChannel->encodeData(data);
    env->ReleaseByteArrayElements(data_, data, 0);
}
void VideoChannel::encodeData(int8_t *data) {
    pthread_mutex_lock(&mutex);
    // Y plane
    memcpy(pic_in->img.plane[0], data, ySize);
    for (int i = 0; i < uvSize; ++i) {
        // U data (NV21 stores V and U interleaved after the Y plane)
        *(pic_in->img.plane[1] + i) = *(data + ySize + i * 2 + 1);
        // V data
        *(pic_in->img.plane[2] + i) = *(data + ySize + i * 2);
    }
    // the encoded output (NAL units)
    x264_nal_t *pp_nal;
    // how many NAL units were produced
    int pi_nal;
    x264_picture_t pic_out;
    x264_encoder_encode(videoCodec, &pp_nal, &pi_nal, pic_in, &pic_out);
    // for a key frame, the SPS and PPS come out as separate NAL units
    int sps_len;
    int pps_len;
    uint8_t sps[100];
    uint8_t pps[100];
    for (int i = 0; i < pi_nal; ++i) {
        if (pp_nal[i].i_type == NAL_SPS) {
            // strip the H.264 start code 00 00 00 01
            sps_len = pp_nal[i].i_payload - 4;
            memcpy(sps, pp_nal[i].p_payload + 4, sps_len);
        } else if (pp_nal[i].i_type == NAL_PPS) {
            pps_len = pp_nal[i].i_payload - 4;
            memcpy(pps, pp_nal[i].p_payload + 4, pps_len);
            // the PPS always follows the SPS, so send both once the PPS arrives
            sendSpsPps(sps, pps, sps_len, pps_len);
        } else {
            sendFrame(pp_nal[i].i_type, pp_nal[i].p_payload, pp_nal[i].i_payload);
        }
    }
    pthread_mutex_unlock(&mutex);
}
- Send the SPS and PPS:
void VideoChannel::sendSpsPps(uint8_t *sps, uint8_t *pps, int sps_len, int pps_len) {
    // body size per the FLV/RTMP video tag layout
    int bodySize = 13 + sps_len + 3 + pps_len;
    RTMPPacket *packet = new RTMPPacket;
    RTMPPacket_Alloc(packet, bodySize);
    int i = 0;
    // fixed header: key frame + AVC
    packet->m_body[i++] = 0x17;
    // packet type: 0 = AVC sequence header
    packet->m_body[i++] = 0x00;
    // composition time 0x000000
    packet->m_body[i++] = 0x00;
    packet->m_body[i++] = 0x00;
    packet->m_body[i++] = 0x00;
    // configuration version
    packet->m_body[i++] = 0x01;
    // profile / compatibility / level, copied from the SPS
    packet->m_body[i++] = sps[1];
    packet->m_body[i++] = sps[2];
    packet->m_body[i++] = sps[3];
    packet->m_body[i++] = 0xFF;
    // number of SPS
    packet->m_body[i++] = 0xE1;
    // SPS length
    packet->m_body[i++] = (sps_len >> 8) & 0xff;
    packet->m_body[i++] = sps_len & 0xff;
    memcpy(&packet->m_body[i], sps, sps_len);
    i += sps_len;
    // number of PPS, then PPS length and data
    packet->m_body[i++] = 0x01;
    packet->m_body[i++] = (pps_len >> 8) & 0xff;
    packet->m_body[i++] = (pps_len) & 0xff;
    memcpy(&packet->m_body[i], pps, pps_len);
    // video packet
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    packet->m_nBodySize = bodySize;
    // pick an arbitrary channel (avoid the ones used inside rtmp.c)
    packet->m_nChannel = 10;
    // the SPS/PPS packet carries no timestamp
    packet->m_nTimeStamp = 0;
    // relative rather than absolute timestamps
    packet->m_hasAbsTimestamp = 0;
    packet->m_headerType = RTMP_PACKET_SIZE_MEDIUM;
    videoCallback(packet);
}
- Send the frame data:
void VideoChannel::sendFrame(int type, uint8_t *payload, int i_payload) {
    // strip the start code: either 00 00 00 01 or 00 00 01
    if (payload[2] == 0x00) {
        i_payload -= 4;
        payload += 4;
    } else {
        i_payload -= 3;
        payload += 3;
    }
    // body size per the FLV/RTMP video tag layout
    int bodySize = 9 + i_payload;
    RTMPPacket *packet = new RTMPPacket;
    RTMPPacket_Alloc(packet, bodySize);
    // inter frame + AVC by default
    packet->m_body[0] = 0x27;
    if (type == NAL_SLICE_IDR) {
        // key frame + AVC
        packet->m_body[0] = 0x17;
        LOGE("key frame");
    }
    // packet type: 1 = AVC NALU
    packet->m_body[1] = 0x01;
    // composition time
    packet->m_body[2] = 0x00;
    packet->m_body[3] = 0x00;
    packet->m_body[4] = 0x00;
    // NALU length, a 4-byte int
    packet->m_body[5] = (i_payload >> 24) & 0xff;
    packet->m_body[6] = (i_payload >> 16) & 0xff;
    packet->m_body[7] = (i_payload >> 8) & 0xff;
    packet->m_body[8] = (i_payload) & 0xff;
    // frame data
    memcpy(&packet->m_body[9], payload, i_payload);
    packet->m_hasAbsTimestamp = 0;
    packet->m_nBodySize = bodySize;
    packet->m_packetType = RTMP_PACKET_TYPE_VIDEO;
    packet->m_nChannel = 0x10;
    packet->m_headerType = RTMP_PACKET_SIZE_LARGE;
    videoCallback(packet);
}
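Both sendSpsPps and sendFrame hand the finished packet to videoCallback, whose definition is not shown here. A plausible sketch in native-lib.cpp is given below: it stamps the packet with a timestamp relative to start_time and pushes it into the packets queue. The function name "callback" and its registration via something like videoChannel->setVideoCallback(callback) in native_init are assumptions about the wiring:

// Hypothetical callback in native-lib.cpp; assumes the globals packets and start_time shown earlier.
void callback(RTMPPacket *packet) {
    if (packet) {
        // timestamp relative to the moment the stream was connected
        packet->m_nTimeStamp = RTMP_GetTime() - start_time;
        // hand the packet to the sending thread via the thread-safe queue
        packets.push(packet);
    }
}
// assumed registration in native_init: videoChannel->setVideoCallback(callback);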
Source code link: https://pan.baidu.com/s/101sPDxMVJd9XeC7JWsmANw (extraction code: bra3)
That concludes this article on RTMP live streaming (Part 1): video push.