初始化mediacodec
// Frame width/height — must match the camera / incoming stream resolution,
// since the YUV converters below index planes by Width*Height.
private int Width = 1280;
private int Height = 720;
// Hardware H.264 decoder instance (created in initMediaCodec).
private MediaCodec mediaCodec;
// Cached input buffers (legacy pre-API-21 getInputBuffers() API).
private ByteBuffer[] inputBuffers;
/**
 * Creates and starts an H.264 ("video/avc") hardware decoder.
 * The decoder is configured WITHOUT a Surface so it emits YUV byte buffers
 * (needed for the NV21 conversion below) instead of rendering directly.
 *
 * @param surface currently unused — NOTE(review): if direct rendering is ever
 *                wanted, pass it to configure() instead of null; confirm intent.
 */
private void initMediaCodec(Surface surface) {
    try {
        Log.d(TAG, "onGetNetVideoData: ");
        // H.264 MIME type is "video/avc" (the original comment wrongly said AAC,
        // which is the audio codec "audio/mp4a-latm").
        mediaCodec = MediaCodec.createDecoderByType("video/avc");
        MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", Width, Height);
        // Expected frame rate in frames/second — a hint for the codec, not enforced.
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 20);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, COLOR_FormatYUV420SemiPlanar);
        // SPS/PPS (csd-0 / csd-1) are intentionally not set here: they are expected
        // to arrive in-band at the start of the H.264 stream.
        mediaCodec.configure(mediaFormat, null, null, 0);
        mediaCodec.start();
        // FIX: moved inside the try — the original called getInputBuffers() after
        // the catch block, which throws NPE when createDecoderByType() fails.
        inputBuffers = mediaCodec.getInputBuffers();
    } catch (IOException e) {
        e.printStackTrace();
        Log.e(TAG, "創(chuàng)建解碼失敗");
    }
}
處理數(shù)據(jù),解碼h264數(shù)據(jù)為yuv格式
這里傳入的是h264格式的實(shí)時(shí)視頻流數(shù)據(jù)。
/**
 * Feeds one H.264 access unit to the decoder and drains all decoded frames,
 * converting each from NV12 to NV21 and handing it to NV21Data().
 *
 * @param buf    byte array holding the encoded data
 * @param offset start of the encoded data inside {@code buf}
 * @param length number of valid bytes
 */
private void onFrame(byte[] buf, int offset, int length) {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    // Wait up to 10000 MICROseconds (dequeue timeouts are in µs, not ms) for a
    // free input buffer; returns -1 when all DSP buffers are busy.
    int inIndex = mediaCodec.dequeueInputBuffer(10000);
    if (inIndex >= 0) {
        ByteBuffer inputBuffer = inputBuffers[inIndex];
        inputBuffer.clear();
        // FIX: honour offset/length — the original put(buf) ignored both
        // parameters and copied the whole array regardless.
        inputBuffer.put(buf, offset, length);
        // Synthetic monotonic timestamp; the decoder only needs it increasing.
        mediaCodec.queueInputBuffer(inIndex, 0, length, mCount * 20, 0);
        mCount++;
    } else {
        Log.i(TAG, "inIndex < 0");
        // No free input buffer — drop this packet and try again next call.
        return;
    }
    int outIndex = mediaCodec.dequeueOutputBuffer(info, 10000);
    Log.e(TAG, "解碼輸出outIndex " + outIndex);
    if (outIndex < 0) {
        Log.e(TAG, "沒有解碼成功");
        return;
    }
    // Drain EVERY frame that is ready: one input can yield 0..n outputs, and the
    // original single dequeue let decoded frames pile up in the codec.
    while (outIndex >= 0) {
        ByteBuffer outputBuffer = mediaCodec.getOutputBuffer(outIndex);
        // Restrict the view to the valid region reported by the codec.
        outputBuffer.position(info.offset);
        outputBuffer.limit(info.offset + info.size);
        byte[] ba = new byte[outputBuffer.remaining()];
        outputBuffer.get(ba);
        // NV21 is the same size as NV12 (Y + interleaved chroma).
        byte[] yuv = new byte[ba.length];
        // The codec was configured with COLOR_FormatYUV420SemiPlanar (NV12).
        // Output color format varies per device — query
        // MediaCodecInfo.CodecCapabilities.colorFormats when porting (observed:
        // Huawei tablet / Meizu: SemiPlanar; rk3588s: Planar/Flexible/Packed/SemiPlanar);
        // use convertI420ToNV21 / convertYV12toNV21 for planar devices.
        convertNV12toNV21(ba, yuv, Width, Height);
        NV21Data(yuv);
        // No Surface is bound, so release without rendering.
        mediaCodec.releaseOutputBuffer(outIndex, false);
        outIndex = mediaCodec.dequeueOutputBuffer(info, 0);
    }
}
處理獲取到的nv21顏色格式的yuv數(shù)據(jù)
// 0 = first frame not yet saved, 1 = already saved (one-shot debug snapshot).
private int printImageStatus = 0;

/**
 * Debug/validation sink for NV21 frames: wraps the data in a YuvImage,
 * JPEG-compresses it, decodes to a Bitmap, shows it in the ImageView, and
 * saves the first frame to sdcard. If the image comes out gray or with
 * red/blue swapped, the input is NOT true NV21.
 *
 * @param nv21 one full frame in NV21 layout, sized Width*Height*3/2
 */
private void NV21Data(byte[] nv21) {
    YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, Width, Height, null);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    yuvImage.compressToJpeg(new Rect(0, 0, Width, Height), 100, baos);
    byte[] data = baos.toByteArray();
    Log.i(TAG, "NV21Data-data: " + data.length);
    final Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
    if (bitmap != null) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                mIvShowImage.setImageBitmap(bitmap);
            }
        });
        // Save the first decoded frame once for offline inspection.
        if (printImageStatus == 0) {
            printImageStatus = 1;
            File myCaptureFile = new File(Environment.getExternalStorageDirectory(), "img.png");
            // try-with-resources closes the stream even on failure (the original
            // leaked it when compress/flush threw).
            try (BufferedOutputStream bos =
                         new BufferedOutputStream(new FileOutputStream(myCaptureFile))) {
                // FIX: compress as PNG to match the .png file name — the original
                // wrote JPEG bytes into "img.png". (Quality is ignored for PNG.)
                bitmap.compress(Bitmap.CompressFormat.PNG, 100, bos);
                bos.flush();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
yuv視頻數(shù)據(jù)顏色格式轉(zhuǎn)換
/**
 * Repacks an I420 frame (Y plane, then U plane, then V plane) into NV21
 * (Y plane, then interleaved V/U pairs). Both arrays must be width*height*3/2.
 */
public static void convertI420ToNV21(byte[] i420, byte[] nv21, int width, int height) {
    final int ySize = width * height;
    final int quarter = ySize / 4;
    // Luma plane is identical in both layouts.
    System.arraycopy(i420, 0, nv21, 0, ySize);
    final int uSrc = ySize;           // U plane start in I420
    final int vSrc = ySize + quarter; // V plane start in I420
    int dst = ySize;
    for (int i = 0; i < quarter; i++) {
        nv21[dst++] = i420[vSrc + i]; // NV21 stores V first...
        nv21[dst++] = i420[uSrc + i]; // ...then U
    }
}
/**
 * Repacks a YV12 frame (Y plane, then V plane, then U plane) into NV21
 * (Y plane, then interleaved V/U pairs). Both arrays must be width*height*3/2.
 */
public static void convertYV12toNV21(byte[] yv12, byte[] nv21, int width, int height) {
    final int ySize = width * height;
    final int chroma = ySize / 4;
    // Luma plane copies straight across.
    System.arraycopy(yv12, 0, nv21, 0, ySize);
    int dst = ySize;
    for (int i = 0; i < chroma; i++) {
        nv21[dst++] = yv12[ySize + i];          // V (first plane after Y in YV12)
        nv21[dst++] = yv12[ySize + chroma + i]; // U (second plane)
    }
}
/**
 * Converts NV12 (Y plane + interleaved U/V) to NV21 (Y plane + interleaved V/U)
 * by swapping each chroma byte pair. Both arrays must be width*height*3/2.
 */
public static void convertNV12toNV21(byte[] nv12, byte[] nv21, int width, int height) {
    final int ySize = width * height;
    // Luma plane is shared between the two layouts.
    System.arraycopy(nv12, 0, nv21, 0, ySize);
    final int end = ySize + ySize / 2;
    // Walk the chroma region pairwise, exchanging U and V.
    for (int p = ySize; p + 1 < end; p += 2) {
        nv21[p] = nv12[p + 1];     // V
        nv21[p + 1] = nv12[p];     // U
    }
}
h264實(shí)時(shí)視頻流的數(shù)據(jù)來源
@Override
public void onPacketEvent(byte[] data) {
    // Native demuxer callback: one raw H.264 packet per call — feed it straight
    // to the decoder.
    onFrame(data, 0, data.length);
    // Optionally dump the raw h264 stream to sdcard for offline analysis:
    //wirte2file(data, data.length);
}
寫入h264視頻流到sdcard中
// Lazily-created buffered stream for dumping the raw H.264 stream to disk.
private BufferedOutputStream BufOs = null;
private File destfile = null;
private FileOutputStream destfs = null;
// Dump path on external storage ("dsetfilePath" spelling kept as-is).
private String dsetfilePath = Environment.getExternalStorageDirectory() + "/" + "test.h264";
/**
 * Appends {@code length} bytes of the raw H.264 stream to test.h264 on sdcard
 * while recording is enabled via onStart()/onStop().
 * (Method name typo "wirte" kept — callers reference it by this name.)
 *
 * @param buf    bytes to append
 * @param length number of valid bytes from the start of {@code buf}
 */
private void wirte2file(byte[] buf, int length) {
    if (!isStart) {
        return;
    }
    // Lazily open the output stream on the first packet after "start".
    if (BufOs == null) {
        destfile = new File(dsetfilePath);
        try {
            destfs = new FileOutputStream(destfile);
            BufOs = new BufferedOutputStream(destfs);
            Log.d(TAG, "wirte2file-new ");
        } catch (FileNotFoundException e) {
            Log.i("TRACK", "initerro" + e.getMessage());
            Log.d(TAG, "wirte2file-FileNotFoundException:" + e.getMessage());
            e.printStackTrace();
        }
    }
    // FIX: if stream creation failed above, BufOs is still null — the original
    // fell straight into write() and relied on the catch-all to swallow the NPE.
    if (BufOs == null) {
        return;
    }
    try {
        BufOs.write(buf, 0, length);
        // Flush per packet so the file is usable even if the app is killed.
        BufOs.flush();
        Log.d(TAG, "wirte2file-write");
    } catch (Exception e) {
        Log.d(TAG, "wirte2file-e: " + e.getMessage());
    }
}
// Whether incoming packets should currently be appended to the .h264 dump file.
private boolean isStart;
public void onStop(View view) {
    // Button handler: stop dumping the stream to sdcard.
    isStart = false;
    Toast.makeText(this, "停止保存", Toast.LENGTH_SHORT).show();
}
public void onStart(View view) {
    // Button handler: begin dumping the incoming h264 stream to sdcard.
    isStart = true;
    Toast.makeText(this, "開始保存", Toast.LENGTH_SHORT).show();
}
rtsp獲取h264實(shí)時(shí)視頻流數(shù)據(jù)
/**
 * Thin Java wrapper around the native "demux" library: demuxes an RTSP/file
 * source and delivers raw H.264 packets and player messages through
 * {@link EventCallback}. Not thread-safe; call lifecycle methods from one thread.
 */
public class FFDemuxJava {
    static {
        System.loadLibrary("demux");
    }

    // Opaque pointer to the native FFBridge instance (0 = not initialised / released).
    private long m_handle = 0;
    private EventCallback mEventCallback = null;

    /** Creates the native demuxer for the given url. */
    public void init(String url) {
        m_handle = native_Init(url);
    }

    /** Starts demuxing. (Capitalised name kept for existing callers.) */
    public void Start() {
        native_Start(m_handle);
    }

    /** Lower-case alias consistent with init()/stop()/unInit(). */
    public void start() {
        Start();
    }

    public void stop() {
        native_Stop(m_handle);
    }

    /**
     * Releases the native instance.
     * FIX: the handle is reset to 0 so a second unInit() or a later Start()
     * cannot touch freed native memory (the native side ignores handle == 0).
     */
    public void unInit() {
        native_UnInit(m_handle);
        m_handle = 0;
    }

    public void addEventCallback(EventCallback callback) {
        mEventCallback = callback;
    }

    // Called from native code: player state/progress message.
    private void playerEventCallback(int msgType, float msgValue) {
        if (mEventCallback != null)
            mEventCallback.onMessageEvent(msgType, msgValue);
    }

    // Called from native code: one raw H.264 packet.
    private void packetEventCallback(byte[] data) {
        if (mEventCallback != null)
            mEventCallback.onPacketEvent(data);
    }

    private native long native_Init(String url);
    private native void native_Start(long playerHandle);
    private native void native_Stop(long playerHandle);
    private native void native_UnInit(long playerHandle);

    public interface EventCallback {
        void onMessageEvent(int msgType, float msgValue);
        void onPacketEvent(byte[] data);
    }
}
編寫C代碼加載ffmpeg庫
#include <jni.h>
#include <cstdio>   // snprintf (bounded string building in GetVersion)
#include <string>
#include "FFBridge.h"
extern "C"
{
#include <libavutil/time.h>
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
#include <libavcodec/avcodec.h>
#include <libavcodec/packet.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
};
// Sanity-check JNI entry point: returns a constant greeting to Java.
// NewStringUTF copies the bytes, so a literal is safe here.
extern "C" JNIEXPORT jstring JNICALL
Java_com_qmcy_demux_MainActivity_stringFromJNI(
        JNIEnv* env,
        jobject /* this */) {
    return env->NewStringUTF("Hello from C++");
}
// Builds a human-readable report of the linked FFmpeg component versions,
// configuration and license, and returns it to Java as a jstring.
extern "C" JNIEXPORT jstring JNICALL
Java_com_qmcy_demux_MainActivity_GetVersion(
        JNIEnv* env,
        jobject /* this */) {
    char strBuffer[1024 * 4] = {0};
    size_t used = 0;
    // FIX: bounded append — the original chained strcat() blindly;
    // avcodec_configuration() alone can be several KB and could overflow
    // the 4 KB buffer. snprintf truncates instead of corrupting the stack.
    auto append = [&](const char* text) {
        if (text == nullptr || used >= sizeof(strBuffer) - 1) return;
        int written = snprintf(strBuffer + used, sizeof(strBuffer) - used, "%s", text);
        if (written > 0) {
            used += static_cast<size_t>(written);
            if (used >= sizeof(strBuffer)) used = sizeof(strBuffer) - 1; // truncated
        }
    };
    append("libavcodec : ");
    append(AV_STRINGIFY(LIBAVCODEC_VERSION));
    append("\nlibavformat : ");
    append(AV_STRINGIFY(LIBAVFORMAT_VERSION));
    append("\nlibavutil : ");
    append(AV_STRINGIFY(LIBAVUTIL_VERSION));
    append("\nlibavfilter : ");
    append(AV_STRINGIFY(LIBAVFILTER_VERSION));
    append("\nlibswresample : ");
    append(AV_STRINGIFY(LIBSWRESAMPLE_VERSION));
    append("\nlibswscale : ");
    append(AV_STRINGIFY(LIBSWSCALE_VERSION));
    append("\navcodec_configure : \n");
    append(avcodec_configuration());
    append("\navcodec_license : ");
    append(avcodec_license());
    //LOGCATE("GetFFmpegVersion\n%s", strBuffer);
    return env->NewStringUTF(strBuffer);
}
// Creates a native FFBridge for the given url and returns its pointer to Java
// as an opaque jlong handle (stored in FFDemuxJava.m_handle).
extern "C" JNIEXPORT jlong JNICALL Java_com_qmcy_demux_FFDemuxJava_native_1Init
(JNIEnv *env, jobject obj, jstring jurl)
{
    const char *urlChars = env->GetStringUTFChars(jurl, nullptr);
    auto *demuxer = new FFBridge();
    demuxer->Init(env, obj, const_cast<char *>(urlChars));
    // Init() is done with the chars; release before returning to Java.
    env->ReleaseStringUTFChars(jurl, urlChars);
    return reinterpret_cast<jlong>(demuxer);
}
// Starts the demuxer identified by the opaque handle; no-op for handle 0.
extern "C"
JNIEXPORT void JNICALL Java_com_qmcy_demux_FFDemuxJava_native_1Start
(JNIEnv *env, jobject obj, jlong handle)
{
    if (handle == 0)
        return;  // init failed or never called
    reinterpret_cast<FFBridge *>(handle)->Start();
}
// Stops the demuxer identified by the opaque handle; no-op for handle 0.
extern "C"
JNIEXPORT void JNICALL Java_com_qmcy_demux_FFDemuxJava_native_1Stop
(JNIEnv *env, jobject obj, jlong handle)
{
    if (handle == 0)
        return;  // init failed or never called
    reinterpret_cast<FFBridge *>(handle)->Stop();
}
// Tears down and frees the native demuxer; no-op for handle 0.
// NOTE: the Java side must not reuse the handle after this call.
extern "C"
JNIEXPORT void JNICALL Java_com_qmcy_demux_FFDemuxJava_native_1UnInit
(JNIEnv *env, jobject obj, jlong handle)
{
    if (handle == 0)
        return;  // nothing to release
    auto *demuxer = reinterpret_cast<FFBridge *>(handle);
    demuxer->UnInit();
    delete demuxer;
}
源碼地址:https://gitee.com/baipenggui/demux_demo.git
到了這里,關(guān)于Android MediaCodec將h264實(shí)時(shí)視頻流數(shù)據(jù)解碼為yuv,并轉(zhuǎn)換yuv的顏色格式為nv21的文章就介紹完了。如果您還想了解更多內(nèi)容,請(qǐng)?jiān)谟疑辖撬阉鱐OY模板網(wǎng)以前的文章或繼續(xù)瀏覽下面的相關(guān)文章,希望大家以后多多支持TOY模板網(wǎng)!