I. Purpose
Use FFmpeg on Android to encode the raw YUV stream captured by the camera into H.264.
II. Environment
1. An FFmpeg shared library with libx264 compiled in (without it, looking up the encoder may fail at runtime). For how to build such a library, see http://blog.csdn.net/a992036795/article/details/53941436
2. An Android development environment (I use Android Studio 2.2.3) and the latest NDK.
III. Approach
1. Initialize the FFmpeg context and its configuration.
2. Open the system camera, set its parameters, and register a callback with mCamera.setPreviewCallbackWithBuffer() to receive the YUV data.
3. Convert the YUV data delivered by the camera (NV21 by default) to YUV420P (see the conversion sketch after this list).
4. Hand the converted data to the encoder and encode it.
5. Write the encoded output.
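
As an aside, step 3 boils down to copying the Y plane and de-interleaving the VU pairs. Here is a minimal standalone sketch of that conversion (the function name nv21_to_yuv420p and the caller-allocated output buffer are my own illustration; the same loop appears inside push() further down):

#include <stdint.h>
#include <string.h>

/* NV21 is the Y plane followed by interleaved VU pairs; YUV420P is the
 * Y plane, then the full U plane, then the full V plane. */
static void nv21_to_yuv420p(const uint8_t *nv21, uint8_t *yuv420p, int w, int h) {
    int y_size = w * h;
    uint8_t *u = yuv420p + y_size;          /* U plane starts right after Y */
    uint8_t *v = u + y_size / 4;            /* V plane starts after U */
    memcpy(yuv420p, nv21, (size_t) y_size); /* Y is copied unchanged */
    for (int j = 0; j < y_size / 4; j++) {
        v[j] = nv21[y_size + j * 2];        /* V comes first in each NV21 pair */
        u[j] = nv21[y_size + j * 2 + 1];    /* U comes second */
    }
}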
IV. Flow
(Flowchart from the original post; it shows the three stages listed below.)

The flow has three main stages:
1. Initialization (open the output file, set parameters, find the encoder, write the header, etc.).
2. Feed data in as it arrives and encode it.
3. Flush the encoder, write the trailer, and release resources.

I map these three stages onto three JNI methods:

    /**
     * Initialize.
     *
     * @param destUrl destination URL
     * @param w       width
     * @param h       height
     * @return result code
     */
    public static native int init(String destUrl, int w, int h);

    /**
     * Feed one frame of data.
     *
     * @param bytes
     * @param w
     * @param h
     * @return
     */
    public static native int push(byte[] bytes, int w, int h);

    /**
     * Stop.
     *
     * @return
     */
    public static native int stop();

V. Code
1. Java

package com.blueberry.x264;

import android.app.Activity;
import android.graphics.ImageFormat;
import android.hardware.Camera;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.widget.Button;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

import static android.hardware.Camera.Parameters.FLASH_MODE_AUTO;
import static android.hardware.Camera.Parameters.PREVIEW_FPS_MAX_INDEX;
import static android.hardware.Camera.Parameters.PREVIEW_FPS_MIN_INDEX;

/**
 * Created by blueberry on 1/3/2017.
 */
public class CameraActivity extends AppCompatActivity implements SurfaceHolder.Callback2,
        Camera.PreviewCallback {

    private static final String TAG = "CameraActivity";

    private Button btnStart;
    private SurfaceView mSurfaceView;
    private SurfaceHolder mSurfaceHolder;
    private Camera mCamera;

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_camera);
        btnStart = (Button) findViewById(R.id.btn_start);
        mSurfaceView = (SurfaceView) findViewById(R.id.surface_view);
        mSurfaceHolder = mSurfaceView.getHolder();
        mSurfaceHolder.addCallback(this);
        btnStart.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                start();
            }
        });
    }

    private boolean isPublish;
    private boolean isStarted;

    private void start() {
        if (isStarted) {
            stop();
            isStarted = false;
        } else {
            isStarted = true;
            // init
            isPublish = true;
            Pusher.init("/sdcard/camera.h264", size.width, size.height);
        }
    }

    private void stop() {
        isPublish = false;
        Pusher.stop();
    }

    private void openCamera() {
        try {
            this.mCamera = Camera.open();
        } catch (RuntimeException e) {
            throw new RuntimeException("Failed to open camera", e);
        }
    }

    private Camera.Size size;
    private boolean isPreview;

    private void initCamera() {
        if (this.mCamera == null) {
            openCamera();
        }
        setParameters();
        setCameraDisplayOrientation(this, Camera.CameraInfo.CAMERA_FACING_BACK, mCamera);
        int buffSize = size.width * size.height * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8;
        mCamera.addCallbackBuffer(new byte[buffSize]);
        mCamera.setPreviewCallbackWithBuffer(this);
        try {
            mCamera.setPreviewDisplay(mSurfaceHolder);
        } catch (IOException e) {
            e.printStackTrace();
        }
        if (isPreview) {
            mCamera.stopPreview();
            isPreview = false;
        }
        mCamera.startPreview();
        isPreview = true;
    }

    public static void setCameraDisplayOrientation(Activity activity,
                                                   int cameraId, android.hardware.Camera camera) {
        android.hardware.Camera.CameraInfo info =
                new android.hardware.Camera.CameraInfo();
        android.hardware.Camera.getCameraInfo(cameraId, info);
        int rotation = activity.getWindowManager().getDefaultDisplay()
                .getRotation();
        int degrees = 0;
        switch (rotation) {
            case Surface.ROTATION_0:
                degrees = 0;
                break;
            case Surface.ROTATION_90:
                degrees = 90;
                break;
            case Surface.ROTATION_180:
                degrees = 180;
                break;
            case Surface.ROTATION_270:
                degrees = 270;
                break;
        }
        int result;
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            result = (info.orientation + degrees) % 360;
            result = (360 - result) % 360;  // compensate the mirror
        } else {  // back-facing
            result = (info.orientation - degrees + 360) % 360;
        }
        camera.setDisplayOrientation(result);
    }

    private void setParameters() {
        Camera.Parameters parameters = mCamera.getParameters();
        List<Camera.Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
        for (Camera.Size supportSize : supportedPreviewSizes) {
            if (supportSize.width >= 160 && supportSize.width <= 240) {
                this.size = supportSize;
                Log.i(TAG, "setParameters: width:" + size.width + " ,height:" + size.height);
                break;
            }
        }
        // Camera FPS ranges are scaled by 1000.
        int defFPS = 20 * 1000;
        List<int[]> supportedPreviewFpsRange = parameters.getSupportedPreviewFpsRange();
        int[] destRange = null;
        for (int i = 0; i < supportedPreviewFpsRange.size(); i++) {
            int[] range = supportedPreviewFpsRange.get(i);
            if (range[PREVIEW_FPS_MAX_INDEX] >= defFPS) {
                destRange = range;
                Log.i(TAG, "setParameters: destRange:" + Arrays.toString(range));
                break;
            }
        }
        parameters.setPreviewFpsRange(destRange[PREVIEW_FPS_MIN_INDEX],
                destRange[PREVIEW_FPS_MAX_INDEX]);
        parameters.setPreviewSize(size.width, size.height);
        parameters.setFlashMode(FLASH_MODE_AUTO);
        parameters.setPreviewFormat(ImageFormat.NV21);
        mCamera.setParameters(parameters);
    }

    @Override
    public void surfaceRedrawNeeded(SurfaceHolder holder) {
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        initCamera();
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
    }

    @Override
    public void onPreviewFrame(final byte[] data, Camera camera) {
        if (isPublish) {
            Pusher.push(data, size.width, size.height);
        }
        // Return the buffer to the camera so the next frame can be delivered.
        int buffSize = size.width * size.height * ImageFormat.getBitsPerPixel(ImageFormat.NV21) / 8;
        if (data == null) {
            mCamera.addCallbackBuffer(new byte[buffSize]);
        } else {
            mCamera.addCallbackBuffer(data);
        }
    }
}

JNI:

public final class Pusher {

    // NOTE: the original post never shows the native library being loaded;
    // the name "x264" here is a placeholder -- use your actual .so name.
    static {
        System.loadLibrary("x264");
    }

    /**
     * Initialize.
     *
     * @param destUrl destination URL
     * @param w       width
     * @param h       height
     * @return result code
     */
    public static native int init(String destUrl, int w, int h);

    /**
     * Feed one frame of data.
     *
     * @param bytes
     * @param w
     * @param h
     * @return
     */
    public static native int push(byte[] bytes, int w, int h);

    /**
     * Stop.
     *
     * @return
     */
    public static native int stop();
}

C:

#include <jni.h>
#include <stdio.h>
#include <android/log.h>
/* The three angle-bracket includes above were stripped by the blog engine;
 * they are restored here as the headers this file actually uses. */

#include "libavutil/imgutils.h"
#include "libavformat/avformat.h"
#include "libavutil/time.h"

#define LOGI(format, ...) \
    __android_log_print(ANDROID_LOG_INFO, TAG, format, ##__VA_ARGS__)
#define LOGD(format, ...) \
    __android_log_print(ANDROID_LOG_DEBUG, TAG, format, ##__VA_ARGS__)
#define LOGE(format, ...) \
    __android_log_print(ANDROID_LOG_ERROR, TAG, format, ##__VA_ARGS__)

#define TAG "Push"
#define FPS 10

AVPacket avPacket;
int size;
AVFrame *avFrame;
AVStream *video_st;
AVCodecContext *avCodecContext;
int frameCount = 0;
AVFormatContext *ofmt_ctx;
int64_t start_time;

static int stop();
static int init(const char *destUrl, int w, int h);
static int push(uint8_t *bytes);
void callback(void *ptr, int level, const char *fmt, va_list vl);

JNIEXPORT jint JNICALL
Java_com_blueberry_x264_Pusher_init(JNIEnv *env, jclass type, jstring destUrl_, jint w, jint h) {
    const char *destUrl = (*env)->GetStringUTFChars(env, destUrl_, 0);
    int ret = init(destUrl, w, h);
    (*env)->ReleaseStringUTFChars(env, destUrl_, destUrl);
    return ret;
}

JNIEXPORT jint JNICALL
Java_com_blueberry_x264_Pusher_push(JNIEnv *env, jclass type, jbyteArray bytes_, jint w, jint h) {
    jbyte *bytes = (*env)->GetByteArrayElements(env, bytes_, NULL);
//    I420: YYYYYYYY UU VV    => YUV420P
//    YV12: YYYYYYYY VV UU    => YUV420P
//    NV12: YYYYYYYY UVUV     => YUV420SP
//    NV21: YYYYYYYY VUVU     => YUV420SP
    int ret = push((uint8_t *) bytes);
    (*env)->ReleaseByteArrayElements(env, bytes_, bytes, 0);
    return ret;
}

JNIEXPORT jint JNICALL
Java_com_blueberry_x264_Pusher_stop(JNIEnv *env, jclass type) {
    return stop();
}

void callback(void *ptr, int level, const char *fmt, va_list vl) {
    FILE *f = fopen("/storage/emulated/0/avlog.txt", "a+");
    if (f) {
        vfprintf(f, fmt, vl);
        fflush(f);
        fclose(f);
    }
}

static int flush_encoder(AVFormatContext *fmt_ctx, int streamIndex) {
    int ret;
    int got_frame;
    AVPacket enc_pkt;
    if (!(fmt_ctx->streams[streamIndex]->codec->codec->capabilities & CODEC_CAP_DELAY)) {
        return 0;
    }
    while (1) {
        enc_pkt.data = NULL;
        enc_pkt.size = 0;
        av_init_packet(&enc_pkt);
        // Passing a NULL frame drains the encoder's delayed frames.
        ret = avcodec_encode_video2(fmt_ctx->streams[streamIndex]->codec, &enc_pkt, NULL,
                                    &got_frame);
        if (ret < 0) {
            break;
        }
        if (!got_frame) {
            ret = 0;
            return ret;
        }
        LOGI("Flush Encoder : Succeed to encode 1 frame!\tsize:%5d\n", enc_pkt.size);
        ret = av_write_frame(fmt_ctx, &enc_pkt);
        if (ret < 0) {
            break;
        }
    }
    return ret;
}

static int stop() {
    int ret;
    ret = flush_encoder(ofmt_ctx, 0);
    if (ret < 0) {
        LOGE("Flush Encoder failed");
        goto end;
    }
    av_write_trailer(ofmt_ctx);
    end:
    // Clean up
    if (video_st) {
        avcodec_close(video_st->codec);
        av_free(avFrame);
    }
    avio_close(ofmt_ctx->pb);
    avformat_free_context(ofmt_ctx);
    LOGI("stop----------------------");
    return ret;
}

static int push(uint8_t *bytes) {
    start_time = av_gettime();
    int got_picture = 0;
    static int i = 0;
    int j = 0;
    avFrame = av_frame_alloc();
    int picture_size = av_image_get_buffer_size(avCodecContext->pix_fmt, avCodecContext->width,
                                                avCodecContext->height, 1);
    uint8_t buffers[picture_size];
    av_image_fill_arrays(avFrame->data, avFrame->linesize, buffers, avCodecContext->pix_fmt,
                         avCodecContext->width, avCodecContext->height, 1);
    av_new_packet(&avPacket, picture_size);
    size = avCodecContext->width * avCodecContext->height;
    // The Android camera delivers NV21; convert it to YUV420P here.
    memcpy(avFrame->data[0], bytes, size); // Y
    for (j = 0; j < size / 4; j++) {
        *(avFrame->data[2] + j) = *(bytes + size + j * 2);     // V
        *(avFrame->data[1] + j) = *(bytes + size + j * 2 + 1); // U
    }
    int ret = avcodec_encode_video2(avCodecContext, &avPacket, avFrame, &got_picture);
    LOGD("avcodec_encode_video2 spend time %ld", (long) ((av_gettime() - start_time) / 1000));
    if (ret < 0) {
        LOGE("Fail to avcodec_encode ! code:%d", ret);
        return -1;
    }
    if (got_picture == 1) {
        avPacket.pts = i++ * (video_st->time_base.den) / ((video_st->time_base.num) * FPS);
        LOGI("Succeed to encode frame: %5d\tsize:%5d\n", frameCount, avPacket.size);
        avPacket.stream_index = video_st->index;
        avPacket.dts = avPacket.pts;
        avPacket.duration = 1;
        int64_t pts_time = AV_TIME_BASE * av_q2d(video_st->time_base);
        int64_t now_time = av_gettime() - start_time;
        if (pts_time > now_time) {
            LOGD("waiting");
            av_usleep(pts_time - now_time);
        }
        av_write_frame(ofmt_ctx, &avPacket);
        LOGD("av_write_frame spend time %ld", (long) ((av_gettime() - start_time) / 1000));
        av_free_packet(&avPacket);
        frameCount++;
    } else {
        LOGE("encoder produced no packet for this frame");
    }
    av_frame_free(&avFrame);
    return 0;
}

static int init(const char *destUrl, int w, int h) {
    av_log_set_callback(callback);
    av_register_all();
    LOGD("register_all");
    AVOutputFormat *fmt;
    int ret;
    LOGI("output url: %s", destUrl);
    // NB: "flv" selects the FLV muxer, so the output file is an FLV container
    // even though the Java side names it camera.h264.
    avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", destUrl);
    LOGD("alloc ofmt_ctx finished");
    fmt = ofmt_ctx->oformat;
    if ((ret = avio_open(&ofmt_ctx->pb, destUrl, AVIO_FLAG_READ_WRITE)) < 0) {
        LOGE("avio_open error");
        return -1;
    }
    video_st = avformat_new_stream(ofmt_ctx, NULL);
    if (video_st == NULL) {
        ret = -1;
        return -1;
    }
    LOGD("new stream finished");
    avCodecContext = video_st->codec;
//    avCodecContext->codec_id = fmt->video_codec;
    avCodecContext->codec_id = AV_CODEC_ID_H264;
    avCodecContext->codec_type = AVMEDIA_TYPE_VIDEO;
    avCodecContext->pix_fmt = AV_PIX_FMT_YUV420P;
    avCodecContext->width = w;
    avCodecContext->height = h;
    // Target bitrate; the higher it is, the larger the output.
    avCodecContext->bit_rate = 400000; // 400,000
    // Insert an I-frame every 250 frames; fewer I-frames mean a smaller output.
    avCodecContext->gop_size = 250;
    // The time base (frame interval) is expressed as a fraction.
    avCodecContext->time_base.num = 1;
    avCodecContext->time_base.den = FPS;
    // Minimum and maximum quantizer.
    avCodecContext->qmin = 10;
    avCodecContext->qmax = 51;
    avCodecContext->max_b_frames = 3;
    // Set options
    AVDictionary *param = 0;
    // H.264
    if (avCodecContext->codec_id == AV_CODEC_ID_H264) {
        av_dict_set(&param, "preset", "slow", 0);
        av_dict_set(&param, "tune", "zerolatency", 0);
        LOGI("set h264 param finished");
    }
    // H.265
    if (avCodecContext->codec_id == AV_CODEC_ID_H265) {
        av_dict_set(&param, "preset", "ultrafast", 0);
        av_dict_set(&param, "tune", "zero-latency", 0);
        LOGI("set h265 param");
    }
    AVCodec *avCodec;
    avCodec = avcodec_find_encoder(avCodecContext->codec_id);
    if (NULL == avCodec) {
        LOGE("Failed to find encoder..");
        return -1;
    }
    if ((ret = avcodec_open2(avCodecContext, avCodec, &param)) < 0) {
        LOGE("avcodec_open2 fail!");
        return -1;
    }
    // Write the file header
    avformat_write_header(ofmt_ctx, NULL);
    return ret;
}
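
A side note on the API: avcodec_encode_video2 is deprecated since FFmpeg 3.1 in favor of the send/receive pair. If your build is new enough, the encode-and-write step in push() could be replaced by a loop along these lines (a sketch under that assumption, reusing the globals above; the helper name encode_and_write is mine and error handling is trimmed):

/* Sketch only: assumes FFmpeg >= 3.1, which provides the send/receive API.
 * A NULL frame drains the encoder, so this also covers flush_encoder(). */
static int encode_and_write(AVCodecContext *ctx, AVFrame *frame, AVFormatContext *fmt_ctx) {
    int ret = avcodec_send_frame(ctx, frame);
    if (ret < 0)
        return ret;
    while (ret >= 0) {
        AVPacket pkt;
        av_init_packet(&pkt);
        pkt.data = NULL;
        pkt.size = 0;
        ret = avcodec_receive_packet(ctx, &pkt);
        if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF)
            return 0;                  /* needs more input / fully drained */
        if (ret < 0)
            return ret;
        pkt.stream_index = video_st->index;
        ret = av_write_frame(fmt_ctx, &pkt);
        av_packet_unref(&pkt);
    }
    return ret;
}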

Note that I set the frame rate to 10 fps and cap the preview width at 240 pixels, mainly to keep encoding fast enough (my Xiaomi 2S can just barely keep up with that). In my tests avcodec_encode_video2 is extremely slow; with a 1080p image a single call can take about a second. I have only just started with FFmpeg and don't yet know how to optimize this; pointers from anyone who does are welcome.
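
One knob that does make a large difference on weak hardware is the x264 preset. The init() code above asks for "slow"; the standard x264 presets go all the way down to "ultrafast", which cuts per-frame encode time dramatically at the cost of compression efficiency. A sketch of a speed-oriented option set for init() (the helper name fast_x264_options is my own; "ultrafast" and "zerolatency" are standard x264 preset/tune names):

#include "libavutil/dict.h"

/* Returns an option dictionary to pass to avcodec_open2(). */
static AVDictionary *fast_x264_options(void) {
    AVDictionary *param = NULL;
    av_dict_set(&param, "preset", "ultrafast", 0); /* fastest preset, worst quality per bit */
    av_dict_set(&param, "tune", "zerolatency", 0); /* no lookahead, no frame queuing */
    return param;
}

With "zerolatency" it also makes sense to set avCodecContext->max_b_frames = 0, since B-frames force the encoder to buffer frames before it can emit anything.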

References:
http://blog.csdn.net/leixiaohua1020/article/details/25430425
http://blog.csdn.net/beyond_cn/article/details/12998247
