```java
// Width/height must match the camera resolution
private int Width = 1280;
private int Height = 720;
private MediaCodec mediaCodec;
private ByteBuffer[] inputBuffers;

private void initMediaCodec(Surface surface) {
    try {
        Log.d(TAG, "onGetNetVideoData: ");
        // Create the decoder; the MIME type for H.264 is "video/avc"
        mediaCodec = MediaCodec.createDecoderByType("video/avc");
        // Create the format describing the stream
        MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", Width, Height);
        // Expected frame rate of the video, in frames per second
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 20);

        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, COLOR_FormatYUV420SemiPlanar);
        // byte[] headerSps = {0, 0, 0, 1, 103, 66, 0, 41, -115, -115, 64, 80, 30, -48, 15, 8, -124, 83, -128};
        // byte[] headerPps = {0, 0, 0, 1, 104, -54, 67, -56};
        //
        // mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(headerSps));
        // mediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(headerPps));

        // Configure the codec; the Surface argument is null here, so decoded frames
        // are returned in output buffers instead of being rendered directly
        mediaCodec.configure(mediaFormat, null, null, 0);
        mediaCodec.start();
    } catch (IOException e) {
        e.printStackTrace();
        // Failed to create the decoder
        Log.e(TAG, "Failed to create the decoder");
    }

    inputBuffers = mediaCodec.getInputBuffers();
}
```
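The commented-out color-format probe further below refers to a `codecInfo` variable that is never defined in the snippet; it can be obtained from the codec itself. A minimal sketch, assuming `mediaCodec` has been created as above:

```java
// Sketch: list the color formats the chosen H.264 decoder claims to support.
MediaCodecInfo codecInfo = mediaCodec.getCodecInfo();
MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType("video/avc");
for (int colorFormat : capabilities.colorFormats) {
    Log.i(TAG, "supported color format: " + colorFormat);
}
```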
The data passed in here is real-time H.264 video stream data.
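If it is unclear whether the incoming buffers contain SPS/PPS (needed when `csd-0`/`csd-1` are not set explicitly), the NAL unit type can be inspected directly. A minimal sketch, assuming each buffer starts with an Annex-B start code (`00 00 00 01` or `00 00 01`); the helper name `naluType` is made up for illustration:

```java
// Sketch: read the NAL unit type of an Annex-B framed H.264 buffer.
// Type 7 = SPS, 8 = PPS, 5 = IDR frame, 1 = non-IDR slice.
private static int naluType(byte[] buf) {
    if (buf.length > 4 && buf[0] == 0 && buf[1] == 0 && buf[2] == 0 && buf[3] == 1) {
        return buf[4] & 0x1F;   // 4-byte start code
    }
    if (buf.length > 3 && buf[0] == 0 && buf[1] == 0 && buf[2] == 1) {
        return buf[3] & 0x1F;   // 3-byte start code
    }
    return -1;                  // no start code found
}
```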
```java
private long mCount = 0;   // frame counter used to derive a presentation timestamp

private void onFrame(byte[] buf, int offset, int length) {

    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    // Wait up to 10000 microseconds for a free input buffer; returns -1 if all of the
    // codec's input buffers are still in use, otherwise an index >= 0
    int inIndex = mediaCodec.dequeueInputBuffer(10000);
    if (inIndex >= 0) {
        // Get the usable buffer for the returned index
        ByteBuffer byteBuffer = inputBuffers[inIndex];
        // Clear any previous contents
        byteBuffer.clear();
        // Fill the buffer with the H.264 data
        byteBuffer.put(buf);
        // Hand the filled buffer at inIndex back to MediaCodec for decoding
        mediaCodec.queueInputBuffer(inIndex, 0, length, mCount * 20, 0);
        mCount++;
    } else {
        Log.i(TAG, "inIndex < 0");
        // No free input buffer yet; try again with the next frame
        return;
    }
    // Query the codec's output queue for a decoded buffer
    int outIndex = mediaCodec.dequeueOutputBuffer(info, 10000);
    Log.e(TAG, "decoder output outIndex " + outIndex);
    if (outIndex >= 0) {

        // The codec's output buffer cannot be used directly; copy the data out first
        ByteBuffer byteBuffer = mediaCodec.getOutputBuffer(outIndex);
        // Restrict the buffer to the valid decoded data
        byteBuffer.position(info.offset);
        byteBuffer.limit(info.offset + info.size);

        byte[] ba = new byte[byteBuffer.remaining()];
        byteBuffer.get(ba);
        // The destination array must be the same size as the NV12 data
        byte[] yuv = new byte[ba.length];
        // The decoder's color format was not known in advance; the converters below were tried one by one
        //convertI420ToNV21(ba, yuv, Width, Height);
        //convertYV12toNV21(ba, yuv, Width, Height);
        convertNV12toNV21(ba, yuv, Width, Height);
        NV21Data(yuv);

        // Check which color formats the codec supports
        // MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType("video/avc");
        // for (int i = 0; i < capabilities.colorFormats.length; i++) {
        //     int format = capabilities.colorFormats[i];
        //
        //     // Huawei tablet: COLOR_FormatYUV420SemiPlanar, COLOR_FormatYUV420Planar
        //     // Meizu phone:   COLOR_FormatYUV420SemiPlanar
        //     // rk3588s:       COLOR_FormatYUV420Planar, COLOR_FormatYUV420Flexible, COLOR_FormatYUV420PackedSemiPlanar, COLOR_FormatYUV420SemiPlanar
        //     switch (format) {
        //         case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar: // (corresponds to I420 or YV12)
        //             Log.i("COLOR_Format_TAG", "=========COLOR_FormatYUV420Planar");
        //             byte[] convertNv21YUV420Planar = new byte[ba.length];
        //             // Not sure which planar layout it is; the converters were tried one by one
        //             convertI420ToNV21(ba, convertNv21YUV420Planar, Width, Height);
        //             convertYV12toNV21(ba, convertNv21YUV420Planar, Width, Height);
        //             long l1 = System.currentTimeMillis();
        //             convertNV12toNV21(ba, convertNv21YUV420Planar, Width, Height);
        //             Log.i("timing", "NV21 conversion took: " + (System.currentTimeMillis() - l1));
        //             long l2 = System.currentTimeMillis();
        //             NV21Data(convertNv21YUV420Planar);
        //             Log.i("timing", "recognition took: " + (System.currentTimeMillis() - l2));
        //             continue;
        //
        //         case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: // NV12
        //             Log.i("COLOR_Format_TAG", "=======COLOR_FormatYUV420SemiPlanar");
        //             byte[] nv21YUV420SemiPlanar = new byte[ba.length];
        //             convertNV12toNV21(ba, nv21YUV420SemiPlanar, Width, Height);
        //             NV21Data(nv21YUV420SemiPlanar);
        //             continue;
        //
        //         case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
        //             Log.i("COLOR_Format_TAG", "=======COLOR_FormatYUV420PackedSemiPlanar");
        //             byte[] nv21YUV420PackedSemiPlanar = new byte[ba.length];
        //             convertNV12toNV21(ba, nv21YUV420PackedSemiPlanar, Width, Height);
        //             NV21Data(nv21YUV420PackedSemiPlanar);
        //             continue;
        //
        //         case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible:
        //             byte[] nv21YUV420YUV420Flexible = new byte[ba.length];
        //             convertNV12toNV21(ba, nv21YUV420YUV420Flexible, Width, Height);
        //             NV21Data(nv21YUV420YUV420Flexible);
        //             Log.i("COLOR_Format_TAG", "=======COLOR_FormatYUV420Flexible");
        //             continue;
        //
        //         default:
        //             continue;
        //     }
        // }

        // Release the output buffer; the second argument is false because no Surface
        // was bound in configure(), so the codec does not render the frame itself
        mediaCodec.releaseOutputBuffer(outIndex, false);
    } else {
        Log.e(TAG, "no decoded output available");
    }
}
```
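`dequeueOutputBuffer()` can also return negative status codes rather than a buffer index, and on many devices the actual color format only becomes known via `INFO_OUTPUT_FORMAT_CHANGED`. A minimal sketch of how the else branch above might be extended to handle those codes (an assumption, not part of the original code):

```java
// Sketch: handle the documented negative return values of dequeueOutputBuffer().
if (outIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
    // No output available within the timeout; try again on the next call
} else if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    // The output format (including KEY_COLOR_FORMAT) is now known
    MediaFormat newFormat = mediaCodec.getOutputFormat();
    Log.i(TAG, "output format changed: " + newFormat);
} else if (outIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
    // Only relevant on older API levels that use getOutputBuffers()
}
```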
```java
private int printImageStatus = 0;

private void NV21Data(byte[] nv21) {
    // Wrap the NV21 data in a YuvImage, convert it to a Bitmap and show it in an ImageView,
    // or save it to local storage. If the image is not grey and the colors are not swapped
    // (blue/red inverted), the stream really is standard NV21 data.
    YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, Width, Height, null);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    yuvImage.compressToJpeg(new Rect(0, 0, Width, Height), 100, baos);
    byte[] data = baos.toByteArray();
    Log.i(TAG, "NV21Data-data: " + data.length);

    Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);

    if (bitmap != null) {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                mIvShowImage.setImageBitmap(bitmap);
            }
        });
        // Save the first decoded bitmap to disk (JPEG-compressed, despite the .png file name)
        if (printImageStatus == 0) {
            printImageStatus = 1;
            try {
                File myCaptureFile = new File(Environment.getExternalStorageDirectory(), "img.png");
                BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(myCaptureFile));
                bitmap.compress(Bitmap.CompressFormat.JPEG, 100, bos);
                bos.flush();
                bos.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
```
```java
// I420 layout: Y plane, then U plane, then V plane.
// NV21 layout: Y plane, then interleaved V/U pairs.
public static void convertI420ToNV21(byte[] i420, byte[] nv21, int width, int height) {
    System.arraycopy(i420, 0, nv21, 0, width * height);
    int offset = width * height;
    for (int i = 0; i < width * height / 4; i++) {
        nv21[offset + 2 * i] = i420[offset + i + width * height / 4];   // V
        nv21[offset + 2 * i + 1] = i420[offset + i];                    // U
    }
}

// YV12 layout: Y plane, then V plane, then U plane.
public static void convertYV12toNV21(byte[] yv12, byte[] nv21, int width, int height) {
    int size = width * height;
    int vOffset = size;
    int uOffset = size + (size / 4);

    // Copy the Y plane as-is
    System.arraycopy(yv12, 0, nv21, 0, size);

    for (int i = 0; i < size / 4; i++) {
        nv21[vOffset + (i * 2)] = yv12[vOffset + i];     // V
        nv21[vOffset + (i * 2) + 1] = yv12[uOffset + i]; // U
    }
}

// NV12 layout: Y plane, then interleaved U/V pairs; NV21 simply swaps U and V.
public static void convertNV12toNV21(byte[] nv12, byte[] nv21, int width, int height) {
    int size = width * height;
    int offset = size;

    // Copy the Y plane as-is
    System.arraycopy(nv12, 0, nv21, 0, offset);

    for (int i = 0; i < size / 4; i++) {
        nv21[offset + (i * 2) + 1] = nv12[offset + (i * 2)];     // U
        nv21[offset + (i * 2)] = nv12[offset + (i * 2) + 1];     // V
    }
}
```
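A quick way to sanity-check the converters is to run one on a tiny synthetic frame and verify where the chroma bytes land. A minimal sketch, not from the original code, using a 2x2 frame:

```java
// Sketch: verify convertNV12toNV21 swaps U and V on a 2x2 frame.
// NV12: 4 Y bytes, then one U/V pair for the single 2x2 chroma block.
byte[] nv12 = {10, 11, 12, 13, /* U */ 50, /* V */ 60};
byte[] nv21 = new byte[nv12.length];
convertNV12toNV21(nv12, nv21, 2, 2);
// Expected NV21: Y bytes unchanged, then V (60) followed by U (50).
Log.i(TAG, "V=" + nv21[4] + " U=" + nv21[5]); // V=60 U=50
```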
The source of the real-time H.264 video stream data:
```java
@Override
public void onPacketEvent(byte[] data) {
    onFrame(data, 0, data.length);
    // Optionally write the raw H.264 stream to the SD card
    //wirte2file(data, data.length);
}
```
Writing the H.264 video stream to the SD card:
```java
private BufferedOutputStream BufOs = null;
private File destfile = null;
private FileOutputStream destfs = null;
private String dsetfilePath = Environment.getExternalStorageDirectory() + "/" + "test.h264";

private void wirte2file(byte[] buf, int length) {
    if (isStart) {
        if (BufOs == null) {
            destfile = new File(dsetfilePath);
            try {
                destfs = new FileOutputStream(destfile);
                BufOs = new BufferedOutputStream(destfs);
                Log.d(TAG, "wirte2file-new ");
            } catch (FileNotFoundException e) {
                Log.i("TRACK", "initerro" + e.getMessage());
                Log.d(TAG, "wirte2file-FileNotFoundException:" + e.getMessage());
                e.printStackTrace();
            }
        }

        try {
            BufOs.write(buf, 0, length);
            BufOs.flush();
            Log.d(TAG, "wirte2file-write");
        } catch (Exception e) {
            Log.d(TAG, "wirte2file-e: " + e.getMessage());
        }
    }
}

private boolean isStart;

public void onStop(View view) {
    isStart = false;
    Toast.makeText(this, "Stopped saving", Toast.LENGTH_SHORT).show();
}

public void onStart(View view) {
    isStart = true;
    Toast.makeText(this, "Started saving", Toast.LENGTH_SHORT).show();
}
```
```java
public class FFDemuxJava {

    static {
        System.loadLibrary("demux");
    }

    private long m_handle = 0;
    private EventCallback mEventCallback = null;

    public void init(String url) {
        m_handle = native_Init(url);
    }

    public void Start() {
        native_Start(m_handle);
    }

    public void stop() {
        native_Stop(m_handle);
    }

    public void unInit() {
        native_UnInit(m_handle);
    }

    public void addEventCallback(EventCallback callback) {
        mEventCallback = callback;
    }

    // Called from native code to report player messages
    private void playerEventCallback(int msgType, float msgValue) {
        if (mEventCallback != null)
            mEventCallback.onMessageEvent(msgType, msgValue);
    }

    // Called from native code with each demuxed packet
    private void packetEventCallback(byte[] data) {
        if (mEventCallback != null)
            mEventCallback.onPacketEvent(data);
    }

    private native long native_Init(String url);

    private native void native_Start(long playerHandle);

    private native void native_Stop(long playerHandle);

    private native void native_UnInit(long playerHandle);

    public interface EventCallback {
        void onMessageEvent(int msgType, float msgValue);
        void onPacketEvent(byte[] data);
    }
}
```
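As a rough illustration of how this wrapper is driven from an Activity that implements `EventCallback` (the stream URL and lifecycle placement are assumptions, not from the original):

```java
// Sketch: typical FFDemuxJava lifecycle inside the Activity, e.g. in onCreate().
FFDemuxJava demuxer = new FFDemuxJava();
demuxer.init("rtsp://192.168.1.100/stream");   // hypothetical stream URL
demuxer.addEventCallback(this);                // onPacketEvent() feeds onFrame()
demuxer.Start();

// ... later, e.g. in onDestroy():
demuxer.stop();
demuxer.unInit();
```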
```cpp
#include <jni.h>
#include <string>

#include "FFBridge.h"

extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
#include <libavfilter/avfilter.h>
#include <libswresample/swresample.h>
#include <libswscale/swscale.h>
};

extern "C" JNIEXPORT jstring JNICALL
Java_com_qmcy_demux_MainActivity_stringFromJNI(
        JNIEnv *env,
        jobject /* this */) {
    std::string hello = "Hello from C++";
    return env->NewStringUTF(hello.c_str());
}

extern "C" JNIEXPORT jstring JNICALL
Java_com_qmcy_demux_MainActivity_GetVersion(
        JNIEnv *env,
        jobject /* this */) {
    char strBuffer[1024 * 4] = {0};
    strcat(strBuffer, "libavcodec : ");
    strcat(strBuffer, AV_STRINGIFY(LIBAVCODEC_VERSION));
    strcat(strBuffer, "\nlibavformat : ");
    strcat(strBuffer, AV_STRINGIFY(LIBAVFORMAT_VERSION));
    strcat(strBuffer, "\nlibavutil : ");
    strcat(strBuffer, AV_STRINGIFY(LIBAVUTIL_VERSION));
    strcat(strBuffer, "\nlibavfilter : ");
    strcat(strBuffer, AV_STRINGIFY(LIBAVFILTER_VERSION));
    strcat(strBuffer, "\nlibswresample : ");
    strcat(strBuffer, AV_STRINGIFY(LIBSWRESAMPLE_VERSION));
    strcat(strBuffer, "\nlibswscale : ");
    strcat(strBuffer, AV_STRINGIFY(LIBSWSCALE_VERSION));
    strcat(strBuffer, "\navcodec_configure : \n");
    strcat(strBuffer, avcodec_configuration());
    strcat(strBuffer, "\navcodec_license : ");
    strcat(strBuffer, avcodec_license());
    //LOGCATE("GetFFmpegVersion\n%s", strBuffer);
    return env->NewStringUTF(strBuffer);
}

extern "C" JNIEXPORT jlong JNICALL Java_com_qmcy_demux_FFDemuxJava_native_1Init
        (JNIEnv *env, jobject obj, jstring jurl)
{
    const char *url = env->GetStringUTFChars(jurl, nullptr);
    FFBridge *bridge = new FFBridge();
    bridge->Init(env, obj, const_cast<char *>(url));
    env->ReleaseStringUTFChars(jurl, url);
    return reinterpret_cast<jlong>(bridge);
}

extern "C"
JNIEXPORT void JNICALL Java_com_qmcy_demux_FFDemuxJava_native_1Start
        (JNIEnv *env, jobject obj, jlong handle)
{
    if (handle != 0)
    {
        FFBridge *bridge = reinterpret_cast<FFBridge *>(handle);
        bridge->Start();
    }
}

extern "C"
JNIEXPORT void JNICALL Java_com_qmcy_demux_FFDemuxJava_native_1Stop
        (JNIEnv *env, jobject obj, jlong handle)
{
    if (handle != 0)
    {
        FFBridge *bridge = reinterpret_cast<FFBridge *>(handle);
        bridge->Stop();
    }
}

extern "C"
JNIEXPORT void JNICALL Java_com_qmcy_demux_FFDemuxJava_native_1UnInit
        (JNIEnv *env, jobject obj, jlong handle)
{
    if (handle != 0)
    {
        FFBridge *bridge = reinterpret_cast<FFBridge *>(handle);
        bridge->UnInit();
        delete bridge;
    }
}
```