一個使用FFmpeg庫讀取3gp視頻的例子-Android中使用FFmpeg媒體庫(三)
原文:https://doandroid.info/?p=497
繼係列文章《在32位的Ubuntu 11.04中為Android NDK r6編譯FFmpeg 0.8.1版——Android中使用FFmpeg媒體庫(一)》和《在Android中通過jni方式使用編譯好的FFmpeg庫——Android中使用FFmpeg媒體庫(二)》之後,本文將根據github上churnlabs的一個開源項目,深入展開說明如何使用FFmpeg庫進行多媒體開發。
本文中的代碼來自於https://github.com/churnlabs/android-ffmpeg-sample,更多的可以參考這個項目代碼。我會在代碼中加一些自己的注釋。感謝作者churnlabs給我們提供這麼好的例子以供我們學習。
在Android的一些係統層應用開發大多數是采用jni的方式調用,另外對於一些比較吃CPU或者處理邏輯比較複雜的程序,也可以考慮使用jni方式來封裝。可以提高程序的執行效率。
本文涉及到以下幾個方面:
1 將3gp文件push到模擬機器的sdcard中
2 寫jni代碼,內部調用ffmpeg庫的方法,編譯jni庫
3 loadLibrary生成的庫,然後撰寫相應的java代碼
4 執行程序,並查看最終運行結果。
最終程序的顯示效果如下:
1 使用eclipse的DDMS工具,將vid.3pg push到sdcard中
2 撰寫相應的jni文件
/*
* Copyright 2011 - Churn Labs, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This is mostly based off of the FFMPEG tutorial:
* https://dranger.com/ffmpeg/
* With a few updates to support Android output mechanisms and to update
* places where the APIs have shifted.
*/
#include <jni.h>
#include <string.h>
#include <stdio.h>
#include <android/log.h>
#include <android/bitmap.h>
// Include the FFmpeg library headers; these files are placed directly under the jni directory.
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#define LOG_TAG "FFMPEGSample"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
/* Cheat to keep things simple and just use some globals. */
// Global decoder state shared by the JNI entry points below.
// Initialized by openFile(); read by drawFrame()/drawFrameAt()/seek_frame().
AVFormatContext *pFormatCtx;  // demuxer context for the opened 3gp file
AVCodecContext *pCodecCtx;    // decoder context of the selected video stream
AVFrame *pFrame;              // decoded frame in the stream's native pixel format
AVFrame *pFrameRGB;           // frame converted to RGB24 for bitmap drawing
int videoStream;              // index of the video stream inside pFormatCtx
/*
 * Write a frame worth of video (in pFrame) into the Android bitmap
 * described by info using the raw pixel buffer. It's a very inefficient
 * draw routine, but it's easy to read. Relies on the format of the
 * bitmap being 8bits per color component plus an 8bit alpha channel.
 */
// Static helper: copy a decoded RGB24 AVFrame into an Android RGBA bitmap,
// pixel by pixel, honouring both the frame's linesize and the bitmap's stride.
static void fill_bitmap(AndroidBitmapInfo* info, void *pixels, AVFrame *pFrame)
{
    uint8_t *frameLine;
    int yy;
    for (yy = 0; yy < info->height; yy++) {
        uint8_t* line = (uint8_t*)pixels;
        /* Source rows may be padded; step by the frame's linesize, not width*3. */
        frameLine = (uint8_t *)pFrame->data[0] + (yy * pFrame->linesize[0]);
        int xx;
        for (xx = 0; xx < info->width; xx++) {
            int out_offset = xx * 4;  /* RGBA destination: 4 bytes per pixel */
            int in_offset = xx * 3;   /* RGB24 source: 3 bytes per pixel */
            line[out_offset] = frameLine[in_offset];
            line[out_offset+1] = frameLine[in_offset+1];
            line[out_offset+2] = frameLine[in_offset+2];
            /* Fixed: alpha was written as 0, which makes every pixel fully
             * transparent in an ARGB_8888 bitmap; 0xff marks it opaque. */
            line[out_offset+3] = 0xff;
        }
        /* Destination rows advance by the bitmap stride (may include padding). */
        pixels = (char*)pixels + info->stride;
    }
}
// JNI entry point for com.churnlabs.ffmpegsample.MainActivity.openFile().
// Opens /sdcard/vid.3gp, locates the first video stream, opens its decoder,
// and allocates the frames/buffer used later by drawFrame()/drawFrameAt().
// On any failure it logs, releases what was acquired, and returns early.
void Java_com_churnlabs_ffmpegsample_MainActivity_openFile(JNIEnv * env, jobject this)
{
    int err;
    int i;
    AVCodec *pCodec;
    uint8_t *buffer;
    int numBytes;

    /* Register all muxers/demuxers/codecs with FFmpeg (required once). */
    av_register_all();
    LOGE("Registered formats");

    /* Open the clip that was pushed onto the emulator's sdcard via DDMS. */
    err = av_open_input_file(&pFormatCtx, "file:/sdcard/vid.3gp", NULL, 0, NULL);
    LOGE("Called open file");
    if(err!=0) {
        LOGE("Couldn't open file");
        return;
    }
    LOGE("Opened file");

    if(av_find_stream_info(pFormatCtx)<0) {
        LOGE("Unable to get stream info");
        /* Fixed: don't leak the opened demuxer on the error path. */
        av_close_input_file(pFormatCtx);
        pFormatCtx = NULL;
        return;
    }

    /* Find the index of the first video stream in the container. */
    videoStream = -1;
    for (i=0; i<pFormatCtx->nb_streams; i++) {
        if(pFormatCtx->streams[i]->codec->codec_type==CODEC_TYPE_VIDEO) {
            videoStream = i;
            break;
        }
    }
    if(videoStream==-1) {
        LOGE("Unable to find video stream");
        av_close_input_file(pFormatCtx);
        pFormatCtx = NULL;
        return;
    }
    LOGI("Video stream is [%d]", videoStream);

    /* Grab the codec context of that stream and find a matching decoder. */
    pCodecCtx=pFormatCtx->streams[videoStream]->codec;
    pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
    if(pCodec==NULL) {
        LOGE("Unsupported codec");
        av_close_input_file(pFormatCtx);
        pFormatCtx = NULL;
        return;
    }
    /* Open the decoder. */
    if(avcodec_open(pCodecCtx, pCodec)<0) {
        LOGE("Unable to open codec");
        av_close_input_file(pFormatCtx);
        pFormatCtx = NULL;
        return;
    }

    /* Allocate the native-format frame and the RGB24 conversion frame. */
    pFrame=avcodec_alloc_frame();
    pFrameRGB=avcodec_alloc_frame();
    LOGI("Video size is [%d x %d]", pCodecCtx->width, pCodecCtx->height);

    /* Compute and allocate the pixel buffer backing pFrameRGB.
     * (Fixed: in the original listing this allocation was preceded by a bare
     * non-comment line that broke compilation, and was never checked.) */
    numBytes=avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height);
    buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
    if (pFrame == NULL || pFrameRGB == NULL || buffer == NULL) {
        LOGE("Unable to allocate frame buffers");
        return;
    }
    avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24,
        pCodecCtx->width, pCodecCtx->height);
}
// JNI entry point for com.churnlabs.ffmpegsample.MainActivity.drawFrame().
// Reads packets until one full video frame decodes, converts it to RGB24
// with libswscale, and paints it into the Java-side Bitmap.
// Note: the parameter is really an android.graphics.Bitmap, so it is declared
// jobject (the original listing said jstring, which only compiled because
// jstring aliases jobject in C).
void Java_com_churnlabs_ffmpegsample_MainActivity_drawFrame(JNIEnv * env, jobject this, jobject bitmap)
{
    AndroidBitmapInfo info;
    void* pixels;
    int ret;
    int i;
    int frameFinished = 0;
    AVPacket packet;
    struct SwsContext *img_convert_ctx;

    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
        return;
    }
    LOGE("Checked on the bitmap");
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        /* Fixed: original fell through and wrote through an unlocked buffer. */
        return;
    }
    LOGE("Grabbed the pixels");

    /* Pull packets until the first complete video frame has been decoded. */
    i = 0;
    while((i==0) && (av_read_frame(pFormatCtx, &packet)>=0)) {
        if(packet.stream_index==videoStream) {
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
            if(frameFinished) {
                /* Fixed format specifier: pts is int64_t (may be negative). */
                LOGE("packet pts %lld", (long long)packet.pts);
                // This is much different than the tutorial, sws_scale
                // replaces img_convert, but it's not a complete drop in.
                // This version keeps the image the same size but swaps to
                // RGB24 format, which works perfect for PPM output.
                int target_width = 320;
                int target_height = 240;
                img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                        pCodecCtx->pix_fmt,
                        target_width, target_height, PIX_FMT_RGB24, SWS_BICUBIC,
                        NULL, NULL, NULL);
                if(img_convert_ctx == NULL) {
                    LOGE("could not initialize conversion context\n");
                    /* Fixed: release the packet and the pixel lock on error. */
                    av_free_packet(&packet);
                    AndroidBitmap_unlockPixels(env, bitmap);
                    return;
                }
                sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
                fill_bitmap(&info, pixels, pFrameRGB);
                /* Fixed: the SwsContext leaked on every call in the original. */
                sws_freeContext(img_convert_ctx);
                i = 1;
            }
        }
        av_free_packet(&packet);
    }
    AndroidBitmap_unlockPixels(env, bitmap);
}
//內部調用函數,不對外,用來查找幀
int seek_frame(int tsms)
{
int64_t frame;
frame = av_rescale(tsms,pFormatCtx->streams[videoStream]->time_base.den,pFormatCtx->streams[videoStream]->time_base.num);
frame/=1000;
if(avformat_seek_file(pFormatCtx,videoStream,0,frame,frame,AVSEEK_FLAG_FRAME)<0) {
return 0;
}
avcodec_flush_buffers(pCodecCtx);
return 1;
}
// JNI entry point for com.churnlabs.ffmpegsample.MainActivity.drawFrameAt().
// Seeks to `secs` seconds, then decodes and draws the next complete video
// frame into the Java-side Bitmap (same pipeline as drawFrame()).
// The bitmap parameter is declared jobject — it is an android.graphics.Bitmap,
// not a string (the original jstring compiled only because jstring aliases
// jobject in C).
void Java_com_churnlabs_ffmpegsample_MainActivity_drawFrameAt(JNIEnv * env, jobject this, jobject bitmap, jint secs)
{
    AndroidBitmapInfo info;
    void* pixels;
    int ret;
    int i;
    int frameFinished = 0;
    AVPacket packet;
    struct SwsContext *img_convert_ctx;

    if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
        LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
        return;
    }
    LOGE("Checked on the bitmap");
    if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
        LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
        /* Fixed: original fell through and wrote through an unlocked buffer. */
        return;
    }
    LOGE("Grabbed the pixels");

    /* Jump to the requested position (milliseconds) before decoding. */
    seek_frame(secs * 1000);

    i = 0;
    while ((i== 0) && (av_read_frame(pFormatCtx, &packet)>=0)) {
        if(packet.stream_index==videoStream) {
            avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);
            if(frameFinished) {
                // This is much different than the tutorial, sws_scale
                // replaces img_convert, but it's not a complete drop in.
                // This version keeps the image the same size but swaps to
                // RGB24 format, which works perfect for PPM output.
                int target_width = 320;
                int target_height = 240;
                img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                        pCodecCtx->pix_fmt,
                        target_width, target_height, PIX_FMT_RGB24, SWS_BICUBIC,
                        NULL, NULL, NULL);
                if(img_convert_ctx == NULL) {
                    LOGE("could not initialize conversion context\n");
                    /* Fixed: release the packet and the pixel lock on error. */
                    av_free_packet(&packet);
                    AndroidBitmap_unlockPixels(env, bitmap);
                    return;
                }
                sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);
                fill_bitmap(&info, pixels, pFrameRGB);
                /* Fixed: the SwsContext leaked on every call in the original. */
                sws_freeContext(img_convert_ctx);
                i = 1;
            }
        }
        av_free_packet(&packet);
    }
    AndroidBitmap_unlockPixels(env, bitmap);
}
3 撰寫相應的Android.mk文件
LOCAL_PATH := $(call my-dir) include $(CLEAR_VARS) LOCAL_MODULE := ffmpegutils LOCAL_SRC_FILES := native.c LOCAL_C_INCLUDES := $(LOCAL_PATH)/include LOCAL_LDLIBS := -L$(NDK_PLATFORMS_ROOT)/$(TARGET_PLATFORM)/arch-arm/usr/lib -L$(LOCAL_PATH) -lavformat -lavcodec -lavdevice -lavfilter -lavcore -lavutil -lswscale -llog -ljnigraphics -lz -ldl -lgcc include $(BUILD_SHARED_LIBRARY)
在Android.mk中有一個LOCAL_C_INCLUDES := $(LOCAL_PATH)/include,指明了相應的FFmpeg頭文件路徑。故在代碼中包含
#include <libavcodec/avcodec.h> #include <libavformat/avformat.h> #include <libswscale/swscale.h>
就可以。
4 調用ndk-build,生成libffmpegutils.so文件,將這個文件拷貝到/root/develop/android-ndk-r6/platforms/android-8/arch-arm/usr/lib目錄,使得我們在下麵使用Android AVD2.2的時候,可以加載到這個so文件。
5 撰寫相應的Eclipse項目代碼,由於在native.c文件中指明了項目的包名稱以及類名稱還有函數名稱,故我們的項目為com.churnlabs.ffmpegsample下面的MainActivity.java文件
package com.churnlabs.ffmpegsample;
import android.app.Activity;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ImageView;
/**
 * Demo activity that decodes frames from /sdcard/vid.3gp through the native
 * FFmpeg wrapper (libffmpegutils.so) and shows them in an ImageView.
 * Buttons step to the next frame, or seek forward/back in 5-second jumps.
 */
public class MainActivity extends Activity {
    // Native methods implemented in native.c (JNI).
    private static native void openFile();
    private static native void drawFrame(Bitmap bitmap);
    private static native void drawFrameAt(Bitmap bitmap, int secs);

    private Bitmap mBitmap;  // reusable target bitmap the native code draws into
    private int mSecs = 0;   // current seek position in seconds

    static {
        System.loadLibrary("ffmpegutils");
    }

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        // 320x240 matches the fixed target size of the native sws_scale call.
        mBitmap = Bitmap.createBitmap(320, 240, Bitmap.Config.ARGB_8888);
        openFile();

        Button btnAdv = (Button) findViewById(R.id.frame_adv);
        btnAdv.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                drawFrame(mBitmap);
                showFrame();
            }
        });

        Button btnFwd = (Button) findViewById(R.id.frame_fwd);
        btnFwd.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                mSecs += 5;
                drawFrameAt(mBitmap, mSecs);
                showFrame();
            }
        });

        Button btnBack = (Button) findViewById(R.id.frame_back);
        btnBack.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                // Fixed: clamp at 0 so we never seek to a negative timestamp.
                mSecs = Math.max(0, mSecs - 5);
                drawFrameAt(mBitmap, mSecs);
                showFrame();
            }
        });
    }

    /** Pushes the freshly drawn bitmap into the on-screen ImageView. */
    private void showFrame() {
        ImageView view = (ImageView) findViewById(R.id.frame);
        view.setImageBitmap(mBitmap);
    }
}
7 項目代碼下載:
https://github.com/churnlabs/android-ffmpeg-sample/zipball/master
參考:
1 https://github.com/churnlabs/android-ffmpeg-sample
2 https://www.360doc.com/content/10/1216/17/474846_78726683.shtml
3 https://github.com/prajnashi
最後更新:2017-04-02 06:52:01
上一篇:
《Java 本地接口規範》-JNI 的類型和數據結構
下一篇:
Race_Condition實驗
阿裏雲ECS、Redis再次降價 最高降幅35%
試用配置管理庫typesafe.config
回歸基礎性安全防護:Equifax(艾可飛)事件前車之鑒
深度解析Java8 – AbstractQueuedSynchronizer的實現分析(下)
適應多行長文本的Android TextView
the solution about "messy code" in elicpse
解決T4模板的程序集引用的五種方案
opencv split和merge操作
筆記:Ceph: A Scalable, High-Performance Distributed File System
AliSQL 20171010版本發布 Sequence兼容PostgreSQL/Oracle語法和升級TLSv1.2
